hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
600c33aaf22eaf408ee066ae230204fd2fd78503
| 124
|
py
|
Python
|
emails/__init__.py
|
Vipul-Bajaj/py-emails
|
a4fb1b2de82888c11605e0a70e8de1dd0ed7a98a
|
[
"MIT"
] | 2
|
2021-03-18T11:40:09.000Z
|
2021-03-23T23:26:34.000Z
|
emails/__init__.py
|
Vipul-Bajaj/py-emails
|
a4fb1b2de82888c11605e0a70e8de1dd0ed7a98a
|
[
"MIT"
] | 2
|
2021-10-12T03:03:09.000Z
|
2021-10-13T05:27:15.000Z
|
emails/__init__.py
|
Vipul-Bajaj/py-emails
|
a4fb1b2de82888c11605e0a70e8de1dd0ed7a98a
|
[
"MIT"
] | 1
|
2021-10-12T06:43:43.000Z
|
2021-10-12T06:43:43.000Z
|
from emails._src import Email, from_template, SmtpConfigNotProvidedError, MimeTypeNotSpecifiedError, InvalidSmtpConfigError
| 62
| 123
| 0.895161
| 10
| 124
| 10.9
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 124
| 1
| 124
| 124
| 0.939655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
60886befaf773b57ee5608dd6c44b73c6c26b168
| 15,735
|
py
|
Python
|
embyapi/api/connect_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/connect_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/connect_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class ConnectServiceApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when the caller does
        # not inject one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def delete_users_by_id_connect_link(self, id, **kwargs): # noqa: E501
        """Removes a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_users_by_id_connect_link(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: User Id (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Thin sync/async dispatcher around the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_users_by_id_connect_link_with_http_info(id, **kwargs) # noqa: E501
        else:
            (data) = self.delete_users_by_id_connect_link_with_http_info(id, **kwargs) # noqa: E501
            return data

    def delete_users_by_id_connect_link_with_http_info(self, id, **kwargs): # noqa: E501
        """Removes a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_users_by_id_connect_link_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: User Id (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Accepted kwargs: the endpoint parameter plus the standard
        # api_client pass-through options.
        all_params = ['id'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any kwarg not declared above with a TypeError.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_users_by_id_connect_link" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_users_by_id_connect_link`") # noqa: E501

        collection_formats = {}

        # 'id' is substituted into the '{Id}' segment of the URL path.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id'] # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/Connect/Link', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_connect_exchange(self, connect_user_id, **kwargs): # noqa: E501
        """Gets the corresponding local user from a connect user id # noqa: E501
        Requires authentication as user # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_connect_exchange(connect_user_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str connect_user_id: ConnectUserId (required)
        :return: ConnectConnectAuthenticationExchangeResult
        If the method is called asynchronously,
        returns the request thread.
        """
        # Thin sync/async dispatcher around the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_connect_exchange_with_http_info(connect_user_id, **kwargs) # noqa: E501
        else:
            (data) = self.get_connect_exchange_with_http_info(connect_user_id, **kwargs) # noqa: E501
            return data

    def get_connect_exchange_with_http_info(self, connect_user_id, **kwargs): # noqa: E501
        """Gets the corresponding local user from a connect user id # noqa: E501
        Requires authentication as user # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_connect_exchange_with_http_info(connect_user_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str connect_user_id: ConnectUserId (required)
        :return: ConnectConnectAuthenticationExchangeResult
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['connect_user_id'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any kwarg not declared above with a TypeError.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_connect_exchange" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'connect_user_id' is set
        if ('connect_user_id' not in params or
                params['connect_user_id'] is None):
            raise ValueError("Missing the required parameter `connect_user_id` when calling `get_connect_exchange`") # noqa: E501

        collection_formats = {}

        path_params = {}

        # 'connect_user_id' is sent as the 'ConnectUserId' query parameter.
        query_params = []
        if 'connect_user_id' in params:
            query_params.append(('ConnectUserId', params['connect_user_id'])) # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml']) # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501

        return self.api_client.call_api(
            '/Connect/Exchange', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ConnectConnectAuthenticationExchangeResult', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_connect_pending(self, **kwargs): # noqa: E501
        """Creates a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_connect_pending(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Thin sync/async dispatcher around the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_connect_pending_with_http_info(**kwargs) # noqa: E501
        else:
            (data) = self.get_connect_pending_with_http_info(**kwargs) # noqa: E501
            return data

    def get_connect_pending_with_http_info(self, **kwargs): # noqa: E501
        """Creates a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_connect_pending_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Endpoint takes no parameters of its own; only the standard
        # api_client pass-through options are accepted.
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_connect_pending" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501

        return self.api_client.call_api(
            '/Connect/Pending', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def post_users_by_id_connect_link(self, id, connect_username, **kwargs): # noqa: E501
        """Creates a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_connect_link(id, connect_username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: User Id (required)
        :param str connect_username: Connect username (required)
        :return: ConnectUserLinkResult
        If the method is called asynchronously,
        returns the request thread.
        """
        # Thin sync/async dispatcher around the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.post_users_by_id_connect_link_with_http_info(id, connect_username, **kwargs) # noqa: E501
        else:
            (data) = self.post_users_by_id_connect_link_with_http_info(id, connect_username, **kwargs) # noqa: E501
            return data

    def post_users_by_id_connect_link_with_http_info(self, id, connect_username, **kwargs): # noqa: E501
        """Creates a Connect link for a user # noqa: E501
        Requires authentication as administrator # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_users_by_id_connect_link_with_http_info(id, connect_username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: User Id (required)
        :param str connect_username: Connect username (required)
        :return: ConnectUserLinkResult
        If the method is called asynchronously,
        returns the request thread.
        """
        all_params = ['id', 'connect_username'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any kwarg not declared above with a TypeError.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_users_by_id_connect_link" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_connect_link`") # noqa: E501
        # verify the required parameter 'connect_username' is set
        if ('connect_username' not in params or
                params['connect_username'] is None):
            raise ValueError("Missing the required parameter `connect_username` when calling `post_users_by_id_connect_link`") # noqa: E501

        collection_formats = {}

        # 'id' fills the '{Id}' path segment; 'connect_username' is sent
        # as the 'ConnectUsername' query parameter.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id'] # noqa: E501

        query_params = []
        if 'connect_username' in params:
            query_params.append(('ConnectUsername', params['connect_username'])) # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml']) # noqa: E501

        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501

        return self.api_client.call_api(
            '/Users/{Id}/Connect/Link', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ConnectUserLinkResult', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 38.756158
| 140
| 0.619638
| 1,832
| 15,735
| 5.045306
| 0.090611
| 0.047604
| 0.026723
| 0.029428
| 0.923293
| 0.912582
| 0.884345
| 0.864546
| 0.857838
| 0.843341
| 0
| 0.015771
| 0.29482
| 15,735
| 405
| 141
| 38.851852
| 0.817231
| 0.333079
| 0
| 0.741784
| 1
| 0
| 0.189509
| 0.057226
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042254
| false
| 0
| 0.018779
| 0
| 0.122066
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
608e7eec3bc0f28122f13e3319d83563a883f8e0
| 34,604
|
py
|
Python
|
Packs/PassiveTotal/Integrations/PassiveTotal_v2/PassiveTotal_v2_test.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 2
|
2020-07-27T10:35:41.000Z
|
2020-12-14T15:44:18.000Z
|
Packs/PassiveTotal/Integrations/PassiveTotal_v2/PassiveTotal_v2_test.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 48
|
2022-03-08T13:45:00.000Z
|
2022-03-31T14:32:05.000Z
|
Packs/PassiveTotal/Integrations/PassiveTotal_v2/PassiveTotal_v2_test.py
|
matan-xmcyber/content
|
7f02301c140b35956af3cd20cb8dfc64f34afb3e
|
[
"MIT"
] | 2
|
2020-12-10T12:02:45.000Z
|
2020-12-15T09:20:01.000Z
|
import json
from unittest import mock
from unittest.mock import patch
import pytest
from requests.exceptions import MissingSchema, InvalidSchema, ConnectionError
import demistomock as demisto
from CommonServerPython import DemistoException
# Base URL used when constructing the mocked Client in every test below.
MOCK_URL = 'http://123-fake-api.com'

# Argument fixture for SSL-certificate search commands.
SSL_ARGS = {
    'field': 'serialNumber',
    'query': 'dummy serial number'
}

# Argument fixture for passive-DNS commands (date-time and date-only
# formats are both exercised on purpose).
PDNS_ARGS = {
    'query': 'dummy domain',
    'start': '2020-01-01 00:00:00',
    'end': '2020-01-31'
}

# Per-command argument fixtures for the host-attribute command family,
# keyed by scenario name.
HOST_ATTRIBUTE_ARGS = {
    'component_by_domain': {
        'query': 'dummy domain',
        'start': '2020-05-25 00:05:25'
    },
    'component_by_ip': {
        'query': 'dummy ip',
        'end': '2020-05-25 00:05:25'
    },
    'tracker_by_domain': {
        'query': 'dummy domain',
        'start': '2020-05-25 00:05:25'
    },
    'tracker_by_ip': {
        'query': 'dummy ip',
        'end': '2020-05-25 00:05:25'
    },
    'host_pair': {
        'query': 'dummy child',
        'direction': 'parents',
        'start': '2020-05-25 00:05:25'
    }
}
@pytest.fixture()
def client():
    # Client under test, pointed at the fake base URL with dummy
    # credentials. NOTE(review): the meaning of the positional arguments
    # ('10', False, False) is assumed from this call site only — confirm
    # against PassiveTotal_v2.Client's constructor.
    from PassiveTotal_v2 import Client
    return Client(MOCK_URL, '10', False, False, ('USERNAME', 'API_SECRET'))
def mock_http_response(status=200, json_data=None, raise_for_status=None):
    """Build a mock ``requests``-style HTTP response object.

    :param status: HTTP status code exposed as ``status_code``.
    :param json_data: payload to return from ``response.json()``. Any
        non-``None`` value is honoured, including falsy-but-valid payloads
        such as ``{}`` or ``[]``.
    :param raise_for_status: optional side effect (e.g. an exception
        instance or a Mock) attached to ``response.raise_for_status()``.
    :return: a ``mock.Mock`` configured like an HTTP response.
    """
    mock_resp = mock.Mock()
    # mock raise_for_status call w/optional error
    mock_resp.raise_for_status = mock.Mock()
    if raise_for_status:
        mock_resp.raise_for_status.side_effect = raise_for_status
    # set status code
    mock_resp.status_code = status
    # Fix: compare against None rather than truthiness so empty payloads
    # ({} / []) are still wired up to response.json(); previously they
    # were silently dropped and .json() returned a bare Mock.
    if json_data is not None:
        mock_resp.json = mock.Mock(
            return_value=json_data
        )
    return mock_resp
# --- Client.http_request error-mapping tests -------------------------------
# Each test patches the underlying BaseClient._http_request and checks that
# Client.http_request translates a given HTTP status code into the expected
# user-facing ValueError message.

@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_authentication_error(mock_base_http_request, client):
    """
    When http request return status code 401 then appropriate error message should display.
    """
    # Configure
    mock_base_http_request.return_value = mock_http_response(status=401)
    # Execute
    with pytest.raises(ValueError) as e:
        client.http_request('GET', '/test/url/suffix')
    # Assert
    assert 'Unauthenticated. Check the configured Username and API secret.' == str(e.value)


@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_page_not_found_error(mock_base_http_request, client):
    """
    When http request return status code 404 then appropriate error message should display.
    """
    # Configure
    mock_base_http_request.return_value = mock_http_response(status=404)
    # Execute
    with pytest.raises(ValueError) as e:
        client.http_request('GET', '/test/url/suffix')
    # Assert
    assert 'No record(s) found.' == str(e.value)


@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_proxy_error_based_on_status(mock_base_http_request, client):
    """
    When http request return status code 407 then appropriate error message should display.
    """
    # Configure
    mock_base_http_request.return_value = mock_http_response(status=407)
    # Execute
    with pytest.raises(ValueError) as e:
        client.http_request('GET', '/test/url/suffix')
    # Assert
    assert 'Proxy Error - cannot connect to proxy. Either try clearing the \'Use system proxy\' check-box or check ' \
           'the host, authentication details and connection details for the proxy.' == str(e.value)


@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_internal_server_error(mock_base_http_request, client):
    """
    When http request return status code 500 then appropriate error message should display.
    """
    # Configure
    mock_base_http_request.return_value = mock_http_response(status=500)
    # Execute
    with pytest.raises(ValueError) as e:
        client.http_request('GET', '/test/url/suffix')
    # Assert
    assert 'The server encountered an internal error for PassiveTotal and was unable to complete your request.' == str(
        e.value)


@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_raise_for_status(mock_base_http_request, client):
    """
    When http request return invalid status code then appropriate error message should display.
    """
    # Configure
    mock_raise_for_status = mock.Mock()
    mock_raise_for_status.return_value = None
    mock_response = mock_http_response(status=300, raise_for_status=mock_raise_for_status)
    mock_base_http_request.return_value = mock_response
    # Execute: statuses without a dedicated message fall through to the
    # response's own raise_for_status().
    client.http_request('GET', '/test/url/suffix')
    # Assert
    assert mock_raise_for_status.called
def test_main_success(mocker, client):
    """
    When main function called test function should call.
    """
    import PassiveTotal_v2
    # Route the 'test-module' command through main() with the real
    # connectivity check stubbed out.
    mocker.patch.object(demisto, 'command', return_value='test-module')
    mocker.patch.object(PassiveTotal_v2, 'test_function', return_value='ok')
    PassiveTotal_v2.main()
    assert PassiveTotal_v2.test_function.called


@patch('PassiveTotal_v2.return_error')
def test_main_failure(mock_return_error, capfd, mocker):
    """
    When main function get some exception then valid message should be print.
    """
    import PassiveTotal_v2
    mocker.patch.object(demisto, 'command', return_value='test-module')
    # Force the command handler to raise so main()'s top-level error
    # handler is exercised.
    mocker.patch.object(PassiveTotal_v2, 'test_function', side_effect=Exception)
    # capfd.disabled() keeps demisto's error printing out of pytest's capture.
    with capfd.disabled():
        PassiveTotal_v2.main()
    mock_return_error.assert_called_once_with('Error: ')


@patch('PassiveTotal_v2.Client.http_request')
def test_function_success(request_mocker, client):
    """
    When success response come then test_function command should pass.
    """
    from PassiveTotal_v2 import test_function
    # Any well-formed result set (even empty) means connectivity is OK.
    mock_response = {
        'results': 0,
        'domains': []
    }
    request_mocker.return_value = mock_response
    assert test_function(client) == 'ok'
def test_request_timeout_success(mocker):
    """
    When provided valid request timeout then test should be passed.
    """
    from PassiveTotal_v2 import get_request_timeout
    request_timeout = 5
    params = {
        'request_timeout': str(request_timeout)
    }
    mocker.patch.object(demisto, 'params', return_value=params)
    # Success case: a valid value must not raise.
    get_request_timeout()
    # NOTE(review): this assertion is a tautology (5 == 5) and never
    # inspects get_request_timeout()'s result — consider asserting on
    # the value the function returns/derives instead.
    assert int(request_timeout) == request_timeout


def test_request_timeout_invalid_value(mocker):
    """
    When provided invalid request timeout then display error message.
    """
    from PassiveTotal_v2 import get_request_timeout
    # Configure: non-numeric string is rejected.
    request_timeout = 'invalid_str_value'
    params = {
        'request_timeout': str(request_timeout)
    }
    mocker.patch.object(demisto, 'params', return_value=params)
    # Execute
    with pytest.raises(ValueError) as e:
        get_request_timeout()
    # Assert
    assert 'HTTP(S) Request timeout parameter must be a positive integer.' == str(e.value)


def test_request_timeout_failure(mocker):
    """
    When invalid input provided for request timeout then appropriate error message should display.
    """
    from PassiveTotal_v2 import get_request_timeout
    # Negative values are rejected with the same message as non-numeric input.
    request_timeout = -5
    params = {
        'request_timeout': str(request_timeout)
    }
    mocker.patch.object(demisto, 'params', return_value=params)
    # Execute
    with pytest.raises(ValueError) as e:
        get_request_timeout()
    assert 'HTTP(S) Request timeout parameter must be a positive integer.' == str(e.value)


def test_request_timeout_large_value_failure(mocker):
    """
    When too large value provided for request timeout then raised value error and
    appropriate error message should display.
    """
    from PassiveTotal_v2 import get_request_timeout
    # Value beyond the accepted upper bound triggers a distinct message.
    request_timeout = 990000000000000000
    params = {
        'request_timeout': str(request_timeout)
    }
    mocker.patch.object(demisto, 'params', return_value=params)
    # Execute
    with pytest.raises(ValueError) as e:
        get_request_timeout()
    assert 'Value is too large for HTTP(S) Request Timeout.' == str(e.value)
def test_get_components_command_main_success(mocker, client):
    """
    When "pt-get-components" command executes the get_components_command function should be called from main.
    """
    import PassiveTotal_v2
    # Only command dispatch is under test; the handler itself is stubbed.
    mocker.patch.object(demisto, 'command', return_value='pt-get-components')
    mocker.patch.object(PassiveTotal_v2, 'get_components_command',
                        return_value='No component(s) were found for the given argument(s).')
    PassiveTotal_v2.main()
    assert PassiveTotal_v2.get_components_command.called


@patch('PassiveTotal_v2.Client.http_request')
def test_get_components_command_domain_success(mocker_http_request, client):
    """
    When "pt-get-components" command executes successfully for domain then context output and response should match.
    """
    from PassiveTotal_v2 import get_components_command
    # Fetch the expected response from file
    with open('test_data/HostAttribute/Component/component_resp.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_res = json_file.get('successDomain')
    mocker_http_request.return_value = expected_res
    # Fetch the expected custom entry context from file
    with open('test_data/HostAttribute/Component/component_ec.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_custom_ec = json_file.get('successDomain')
    # Fetch the expected human readable details from file
    with open('test_data/HostAttribute/Component/component_domain_hr.md') as f:
        expected_hr = f.read()

    result = get_components_command(client, HOST_ATTRIBUTE_ARGS['component_by_domain'])

    # Compare raw response, context output, human-readable and metadata
    # of the first command result against the fixtures.
    assert result[0].raw_response == expected_res
    assert result[0].outputs == expected_custom_ec
    assert result[0].readable_output == expected_hr
    assert result[0].outputs_key_field == ''
    assert result[0].outputs_prefix == 'PassiveTotal.Component'


@patch('PassiveTotal_v2.Client.http_request')
def test_get_components_command_ip_success(mocker_http_request, client):
    """
    When "pt-get-components" command executes successfully for ip then context output and response should match.
    """
    from PassiveTotal_v2 import get_components_command
    # Fetch the expected response from file
    with open('test_data/HostAttribute/Component/component_resp.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_res = json_file.get('successIP')
    mocker_http_request.return_value = expected_res
    # Fetch the expected custom entry context from file
    with open('test_data/HostAttribute/Component/component_ec.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_custom_ec = json_file.get('successIP')
    # Fetch the expected human readable details from file
    with open('test_data/HostAttribute/Component/component_ip_hr.md') as f:
        expected_hr = f.read()

    result = get_components_command(client, HOST_ATTRIBUTE_ARGS['component_by_ip'])

    assert result[0].raw_response == expected_res
    assert result[0].outputs == expected_custom_ec
    assert result[0].readable_output == expected_hr
    assert result[0].outputs_key_field == ''
    assert result[0].outputs_prefix == 'PassiveTotal.Component'


@patch('PassiveTotal_v2.Client.http_request')
def test_get_components_command_no_record_found(mocker_http_request, client):
    """
    When no records found from Components response then result string should match.
    """
    from PassiveTotal_v2 import get_components_command
    # Fetch the expected response from file
    with open('test_data/HostAttribute/Component/component_resp.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_res = json_file.get('emptyContent')
    mocker_http_request.return_value = expected_res

    # An empty result set yields a plain string, not CommandResults.
    result = get_components_command(client, HOST_ATTRIBUTE_ARGS['component_by_domain'])

    assert result == 'No component(s) were found for the given argument(s).'
def test_get_trackers_command_main_success(mocker, client):
    """
    When "pt-get-trackers" command executes the get_trackers_command function should be called from main.
    """
    import PassiveTotal_v2
    # Only command dispatch is under test; the handler itself is stubbed.
    mocker.patch.object(demisto, 'command', return_value='pt-get-trackers')
    mocker.patch.object(PassiveTotal_v2, 'get_trackers_command',
                        return_value='No tracker(s) were found for the given argument(s).')
    PassiveTotal_v2.main()
    assert PassiveTotal_v2.get_trackers_command.called


@patch('PassiveTotal_v2.Client.http_request')
def test_get_trackers_command_domain_success(mocker_http_request, client):
    """
    When "pt-get-trackers" command executes successfully for domain then context output and response should match.
    """
    from PassiveTotal_v2 import get_trackers_command
    # Fetch the expected response from file
    with open('test_data/HostAttribute/Tracker/tracker_resp.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_res = json_file.get('successDomain')
    mocker_http_request.return_value = expected_res
    # Fetch the expected custom entry context from file
    with open('test_data/HostAttribute/Tracker/tracker_ec.json', encoding='utf-8') as f:
        json_file = json.load(f)
    expected_custom_ec = json_file.get('successDomain')
    # Fetch the expected human readable details from file
    with open('test_data/HostAttribute/Tracker/tracker_domain_hr.md') as f:
        expected_hr = f.read()

    result = get_trackers_command(client, HOST_ATTRIBUTE_ARGS['tracker_by_domain'])

    # Compare raw response, context output, human-readable and metadata
    # of the first command result against the fixtures.
    assert result[0].raw_response == expected_res
    assert result[0].outputs == expected_custom_ec
    assert result[0].readable_output == expected_hr
    assert result[0].outputs_key_field == ''
    assert result[0].outputs_prefix == 'PassiveTotal.Tracker'
@patch('PassiveTotal_v2.Client.http_request')
def test_get_trackers_command_ip_success(mocker_http_request, client):
    """
    When "pt-get-trackers" command executes successfully for ip then context output and response should match.
    """
    from PassiveTotal_v2 import get_trackers_command

    # Mocked API response for an IP query.
    with open('test_data/HostAttribute/Tracker/tracker_resp.json', encoding='utf-8') as resp_file:
        expected_response = json.load(resp_file).get('successIP')
    mocker_http_request.return_value = expected_response

    # Expected entry context and human readable output fixtures.
    with open('test_data/HostAttribute/Tracker/tracker_ec.json', encoding='utf-8') as ec_file:
        expected_context = json.load(ec_file).get('successIP')
    with open('test_data/HostAttribute/Tracker/tracker_ip_hr.md') as hr_file:
        expected_readable = hr_file.read()

    command_result = get_trackers_command(client, HOST_ATTRIBUTE_ARGS['tracker_by_ip'])[0]
    assert command_result.raw_response == expected_response
    assert command_result.outputs == expected_context
    assert command_result.readable_output == expected_readable
    assert command_result.outputs_key_field == ''
    assert command_result.outputs_prefix == 'PassiveTotal.Tracker'
@patch('PassiveTotal_v2.Client.http_request')
def test_get_trackers_command_no_record_found(mocker_http_request, client):
    """
    When no records found from Trackers response then result string should match.
    """
    from PassiveTotal_v2 import get_trackers_command

    # Simulate an API response with zero tracker records.
    with open('test_data/HostAttribute/Tracker/tracker_resp.json', encoding='utf-8') as resp_file:
        empty_response = json.load(resp_file).get('emptyContent')
    mocker_http_request.return_value = empty_response

    message = get_trackers_command(client, HOST_ATTRIBUTE_ARGS['tracker_by_domain'])
    assert message == 'No tracker(s) were found for the given argument(s).'
def test_get_host_pairs_command_main_success(mocker, client):
    """
    When "pt-get-host-pairs" command executes the get_host_pairs_command function should be called from main.
    """
    import PassiveTotal_v2

    # Route main() to the host-pairs command and stub the handler itself out.
    mocker.patch.object(demisto, 'command', return_value='pt-get-host-pairs')
    mocker.patch.object(
        PassiveTotal_v2,
        'get_host_pairs_command',
        return_value='No host pair(s) were found for the given argument(s).'
    )

    PassiveTotal_v2.main()

    # main() must have dispatched to the stubbed handler.
    assert PassiveTotal_v2.get_host_pairs_command.called
@patch('PassiveTotal_v2.Client.http_request')
def test_get_host_pairs_command_success(mocker_http_request, client):
    """
    When "pt-get-host-pairs" command executes successfully then context output and response should match.
    """
    from PassiveTotal_v2 import get_host_pairs_command

    # Mocked API response.
    with open('test_data/HostAttribute/Host_Pair/host_pair_resp.json', encoding='utf-8') as resp_file:
        expected_response = json.load(resp_file).get('success')
    mocker_http_request.return_value = expected_response

    # Expected entry context and human readable output fixtures.
    with open('test_data/HostAttribute/Host_Pair/host_pair_ec.json', encoding='utf-8') as ec_file:
        expected_context = json.load(ec_file).get('success')
    with open('test_data/HostAttribute/Host_Pair/host_pair_hr.md') as hr_file:
        expected_readable = hr_file.read()

    # Unlike the tracker/component commands this one returns a single CommandResults.
    command_result = get_host_pairs_command(client, HOST_ATTRIBUTE_ARGS['host_pair'])
    assert command_result.raw_response == expected_response
    assert command_result.outputs == expected_context
    assert command_result.readable_output == expected_readable
    assert command_result.outputs_key_field == ''
    assert command_result.outputs_prefix == 'PassiveTotal.HostPair'
@patch('PassiveTotal_v2.Client.http_request')
def test_get_host_pairs_command_no_record_found(mocker_http_request, client):
    """
    When no records found from Host Pairs response then result string should match.
    """
    from PassiveTotal_v2 import get_host_pairs_command

    # Simulate an API response with zero host-pair records.
    with open('test_data/HostAttribute/Host_Pair/host_pair_resp.json', encoding='utf-8') as resp_file:
        empty_response = json.load(resp_file).get('emptyContent')
    mocker_http_request.return_value = empty_response

    message = get_host_pairs_command(client, HOST_ATTRIBUTE_ARGS['host_pair'])
    assert message == 'No host pair(s) were found for the given argument(s).'
@patch('PassiveTotal_v2.Client.http_request')
def test_get_host_pairs_command_invalid_value_for_direction(mocker_http_request, client):
    """
    When invalid value is provided for direction argument in 'pt-get-host-pairs' then error message should match.

    Fix: @patch injects the mock as the first positional argument; the original signature omitted it, so the
    injected mock shadowed the `client` fixture. The parameter is declared (and intentionally unused) here.
    """
    from PassiveTotal_v2 import get_host_pairs_command
    # Configure
    args = {
        'query': 'dummy domain',
        'direction': 'invalid direction'
    }
    # Execute: validation should fail before any HTTP call is made.
    with pytest.raises(ValueError) as e:
        get_host_pairs_command(client, args)
    # Assert
    assert 'The given value for direction is invalid. Supported values: children, parents.' == str(e.value)
def test_get_common_arguments_invalid_value_for_query():
    """
    When invalid value is provided for query argument then error message should match.
    """
    from PassiveTotal_v2 import get_common_arguments

    # An empty query string is rejected by argument validation.
    arguments = {
        'query': '',
        'start': '2016-02-02 02:32:44'
    }

    with pytest.raises(ValueError) as error:
        get_common_arguments(arguments)
    assert str(error.value) == 'The given value for query is invalid.'
def test_get_valid_whois_search_arguments_empty_value_in_query():
    """
    When empty value enter for command argument then should raise error with proper message
    """
    from PassiveTotal_v2 import get_valid_whois_search_arguments

    # Empty query with a valid field should still be rejected.
    arguments = {
        'query': '',
        'field': 'email'
    }

    with pytest.raises(ValueError) as error:
        get_valid_whois_search_arguments(arguments)
    assert str(error.value) == 'query or field argument should not be empty.'
def test_get_valid_whois_search_invalid_value_for_field_arguments():
    """
    When invalid value for command argument field then should raise error with proper message.

    Fix: renamed from ...field_arguemnts (typo); test names are pytest-discovered only, so no caller breaks.
    """
    from PassiveTotal_v2 import get_valid_whois_search_arguments
    # Configure
    args = {
        'query': 'test-query@test.com',
        'field': 'field'
    }
    # Execute
    with pytest.raises(ValueError) as e:
        get_valid_whois_search_arguments(args)
    # Assert
    assert 'Invalid field type field. Valid field types are domain, email, name, organization, address, phone, ' \
           'nameserver.' == str(e.value)
@patch('PassiveTotal_v2.CommandResults')
@patch('PassiveTotal_v2.Client.http_request')
def test_pt_whois_search_command_success(request_mocker, mock_cr, client):
    """
    Proper readable output and context should be set via CommandResults in case of proper response from whois-search
    API endpoint
    """
    from PassiveTotal_v2 import pt_whois_search_command
    from PassiveTotal_v2 import get_human_readable_for_whois_commands
    from PassiveTotal_v2 import get_context_for_whois_commands
    # Configure
    args = {
        'query': 'test-query@test.com',
        'field': 'email'
    }
    # Fixtures: raw API response, expected custom context, expected HR for the
    # command output and for the standard Domain indicator entry.
    with open('test_data/whois_command/whois_command_response.json', 'rb') as f:
        dummy_response = json.load(f)
    with open('test_data/whois_command/whois_custom_context.json', 'rb') as f:
        dummy_custom_context = json.load(f)
    with open('test_data/whois_command/whois_command_readable_output.md', 'r') as f:
        dummy_readable_output = f.read()
    with open('test_data/whois_command/whois_command_standard_domain_readable_output.md', 'r') as f:
        dummy_standard_domain_readable_output = f.read()
    request_mocker.return_value = dummy_response
    # Execute
    domains = dummy_response.get('results')
    # get human readable via dummy response
    readable_output = get_human_readable_for_whois_commands(domains)
    # get custom context via dummy response (index 1 = custom context half)
    custom_context = get_context_for_whois_commands(domains)[1]
    pt_whois_search_command(client, args)
    # Assert
    # asserts the readable output
    assert readable_output == dummy_readable_output
    # asserts the custom context
    assert custom_context == dummy_custom_context
    # assert the standard domain readable output: CommandResults is patched, so
    # the kwargs of its first invocation can be inspected directly.
    assert dummy_standard_domain_readable_output == mock_cr.call_args_list[0][1]['readable_output']
    # assert overall command output (the last CommandResults built)
    mock_cr.assert_called_with(
        outputs_prefix='PassiveTotal.WHOIS',
        outputs_key_field='domain',
        outputs=dummy_custom_context,
        readable_output=dummy_readable_output,
        raw_response=dummy_response
    )
@patch('PassiveTotal_v2.Client.http_request')
def test_pt_whois_search_empty_response(request_mocker, client):
    """
    Proper message should be display in case of empty response from whois-search API endpoint
    """
    from PassiveTotal_v2 import pt_whois_search_command

    arguments = {
        'query': 'test-query@test.com',
        'field': 'email'
    }
    # API returns a result set with no entries.
    request_mocker.return_value = json.loads('{"results": []}')

    message = pt_whois_search_command(client, arguments)
    assert message == 'No domain information were found for the given argument(s).'
@patch("PassiveTotal_v2.Client.http_request")
def test_ssl_cert_search_command_success(mocker_http_request, client):
    """
    When "ssl-cert-search" command executes successfully then context output and response should match.
    """
    from PassiveTotal_v2 import ssl_cert_search_command

    # Mocked API response.
    with open('test_data/SSL/ssl_cert_resp.json', encoding='utf-8') as resp_file:
        expected_response = json.load(resp_file).get('success')
    mocker_http_request.return_value = expected_response

    # Expected entry context and human readable output fixtures.
    with open("test_data/SSL/ssl_cert_ec.json", encoding='utf-8') as ec_file:
        expected_context = json.load(ec_file)
    with open("test_data/SSL/ssl_cert_hr.md") as hr_file:
        expected_readable = hr_file.read()

    command_result = ssl_cert_search_command(client, SSL_ARGS)
    assert command_result.raw_response == expected_response
    assert command_result.outputs == expected_context
    assert command_result.readable_output == expected_readable
    assert command_result.outputs_key_field == 'sha1'
    assert command_result.outputs_prefix == 'PassiveTotal.SSL'
@patch("PassiveTotal_v2.Client.http_request")
def test_ssl_cert_search_no_record_found(mocker_http_request, client):
    """
    When no record found from SSL response then result string should match.
    """
    from PassiveTotal_v2 import ssl_cert_search_command

    # Simulate an API response with zero SSL certificate records.
    with open("test_data/SSL/ssl_cert_resp.json", encoding='utf-8') as resp_file:
        empty_response = json.load(resp_file).get('zeroRecords')
    mocker_http_request.return_value = empty_response

    message = ssl_cert_search_command(client, SSL_ARGS)
    assert message == 'No SSL certificate(s) were found for the given argument(s).'
@patch("PassiveTotal_v2.Client.http_request")
def test_get_pdns_details_command_success(mocker_http_request, client):
    """
    When "get-pdns-details" command executes successfully then context output and response should match.
    """
    from PassiveTotal_v2 import get_pdns_details_command

    # Mocked API response.
    with open('test_data/PDNS/get_pdns_resp.json', encoding='utf-8') as resp_file:
        expected_response = json.load(resp_file).get('success')
    mocker_http_request.return_value = expected_response

    # Expected entry context and human readable output fixtures.
    with open("test_data/PDNS/get_pdns_ec.json", encoding='utf-8') as ec_file:
        expected_context = json.load(ec_file)
    with open("test_data/PDNS/get_pdns_hr.md") as hr_file:
        expected_readable = hr_file.read()

    # The PDNS outputs prefix embeds a DT de-duplication expression.
    expected_prefix = (
        'PassiveTotal.PDNS(val.resolve == obj.resolve && val.recordType == obj.recordType'
        ' && val.resolveType == obj.resolveType)'
    )

    command_result = get_pdns_details_command(client, PDNS_ARGS)[0]
    assert command_result.raw_response == expected_response
    assert command_result.outputs == expected_context
    assert command_result.readable_output == expected_readable
    assert command_result.outputs_prefix == expected_prefix
@patch("PassiveTotal_v2.Client.http_request")
def test_get_pdns_details_no_record_found(mocker_http_request, client):
    """
    When no record found from PDNS response then result string should match.

    Fix: the command was invoked with SSL_ARGS (copy-paste from the SSL tests); use PDNS_ARGS to match the
    sibling success test.
    """
    from PassiveTotal_v2 import get_pdns_details_command
    # Fetching expected raw response from file
    with open("test_data/PDNS/get_pdns_resp.json", encoding='utf-8') as f:
        json_file = json.load(f)
        expected_res = json_file.get('zeroRecords')
    mocker_http_request.return_value = expected_res
    result = get_pdns_details_command(client, PDNS_ARGS)
    assert result == 'No PDNS Record(s) were found for the given argument(s).'
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_missing_schema_error(mock_base_http_request, client):
    """
    When http request return MissingSchema exception then appropriate error message should display.
    """
    # Arrange: the underlying BaseClient request raises MissingSchema.
    mock_base_http_request.side_effect = MissingSchema

    # Act & Assert: the wrapper translates it into a ValueError.
    expected_message = 'Invalid API URL. No schema supplied: http(s).'
    with pytest.raises(ValueError) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == expected_message
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_invalid_schema_error(mock_base_http_request, client):
    """
    When http request return invalid schema exception then appropriate error message should match.
    """
    # Arrange: the underlying BaseClient request raises InvalidSchema.
    mock_base_http_request.side_effect = InvalidSchema

    # Act & Assert: the wrapper translates it into a ValueError.
    expected_message = 'Invalid API URL. Supplied schema is invalid, supports http(s).'
    with pytest.raises(ValueError) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == expected_message
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_proxy_error(mock_base_http_request, client):
    """
    When http request return proxy error with exception then appropriate error message should match.
    """
    # Arrange: the underlying request fails with a proxy-related DemistoException.
    mock_base_http_request.side_effect = DemistoException('Proxy Error')

    # Act & Assert: the wrapper re-raises as a ConnectionError with guidance.
    expected_message = (
        'Proxy Error - cannot connect to proxy. Either try clearing the \'Use system proxy\' check-box or'
        ' check the host, authentication details and connection details for the proxy.'
    )
    with pytest.raises(ConnectionError) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == expected_message
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_connection_error(mock_base_http_request, client):
    """
    When http request return connection error with Demisto exception then appropriate error message should match.
    """
    # Arrange: the underlying request fails with a connection-related DemistoException.
    mock_base_http_request.side_effect = DemistoException('ConnectionError')

    # Act & Assert: the wrapper re-raises as a ConnectionError with guidance.
    expected_message = (
        'Connectivity failed. Check your internet connection, the API URL or try increasing the HTTP(s) Request'
        ' Timeout.'
    )
    with pytest.raises(ConnectionError) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == expected_message
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_connect_timeout_error(mock_base_http_request, client):
    """
    When http request return connect timeout error with Demisto exception then appropriate error message
    should match.
    """
    # Arrange: the underlying request fails with a timeout-related DemistoException.
    mock_base_http_request.side_effect = DemistoException('ConnectTimeout')

    # Act & Assert: timeouts surface the same connectivity guidance message.
    expected_message = (
        'Connectivity failed. Check your internet connection, the API URL or try increasing the HTTP(s) Request'
        ' Timeout.'
    )
    with pytest.raises(ConnectionError) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == expected_message
@patch('PassiveTotal_v2.Client._http_request')
def test_http_request_other_demisto_exception(mock_base_http_request, client):
    """
    When http request return other custom Demisto exception then appropriate error message should match.
    """
    # Arrange: any unrecognised DemistoException message is passed through as-is.
    mock_base_http_request.side_effect = DemistoException('custom')

    # Act & Assert
    with pytest.raises(Exception) as error:
        client.http_request('GET', '/test/url/suffix')
    assert str(error.value) == 'custom'
def test_init():
    """
    test init function
    """
    import PassiveTotal_v2

    # Pretend the module runs as a script so init() invokes (the stubbed) main().
    with mock.patch.object(PassiveTotal_v2, "main", return_value=42), \
            mock.patch.object(PassiveTotal_v2, "__name__", "__main__"):
        PassiveTotal_v2.init()
def test_domain_reputation_command_empty_domain_arguments_values(client):
    """
    When multiple empty value enter for command argument then should raise error with proper message
    """
    from PassiveTotal_v2 import domain_reputation_command

    # A comma-separated list that contains only empty entries.
    arguments = {'domain': ',,'}

    with pytest.raises(ValueError) as error:
        domain_reputation_command(client, arguments)
    assert str(error.value) == 'domain argument should not be empty.'
def test_domain_reputation_command_not_specify_domain_arguments_values(client):
    """
    When no value enter for command argument then should raise error with proper message
    """
    from PassiveTotal_v2 import domain_reputation_command

    # No domain value at all.
    arguments = {'domain': ''}

    with pytest.raises(ValueError) as error:
        domain_reputation_command(client, arguments)
    assert str(error.value) == 'domain(s) not specified'
@patch('PassiveTotal_v2.CommandResults')
@patch('PassiveTotal_v2.Client.http_request')
def test_domain_reputation_command_success(request_mocker, mock_cr, client):
    """
    Proper readable output and context should be set via CommandResults in case of proper response from the
    whois-search API endpoint.

    Fixes: renamed from test_domain_reputatoin_command_success (typo); the fixture directory on disk is
    genuinely named 'domain_reputatoin', so those paths keep the historical spelling.
    """
    from PassiveTotal_v2 import domain_reputation_command
    from PassiveTotal_v2 import get_human_readable_for_whois_commands
    from PassiveTotal_v2 import get_context_for_whois_commands
    # Configure
    args = {
        'domain': 'somedomain.com'
    }
    with open('test_data/domain_reputatoin/domain_reputatoin_response.json', 'rb') as f:
        dummy_response = json.load(f)
    with open('test_data/domain_reputatoin/domain_reputatoin_context.json', 'rb') as f:
        dummy_custom_context = json.load(f)
    with open('test_data/domain_reputatoin/domain_reputatoin_command_readable_output.md', 'r') as f:
        dummy_readable_output = f.read()
    request_mocker.return_value = dummy_response
    # Execute
    domains = dummy_response.get('results')
    # get human readable (reputation mode) via dummy response
    readable_output = get_human_readable_for_whois_commands(
        domains,
        is_reputation_command=True
    )
    # Only the custom context is asserted here; the standard half is unused.
    _, custom_context = get_context_for_whois_commands(domains)
    domain_reputation_command(client, args)
    # Assert
    # asserts the readable output
    assert readable_output == dummy_readable_output
    # asserts the custom context
    assert custom_context == dummy_custom_context
    # assert overall command output
    mock_cr.assert_called_with(
        outputs_prefix='PassiveTotal.Domain',
        outputs_key_field='domain',
        outputs=dummy_custom_context,
        readable_output=dummy_readable_output
    )
@patch('PassiveTotal_v2.CommandResults')
@patch('PassiveTotal_v2.Client.http_request')
def test_domain_reputation_command_empty_response(request_mocker, mock_cr, client):
    """
    Proper message should be displayed in case of an empty response from the whois-search API endpoint.

    Fix: renamed from test_domain_reputatin_command_empty_response (typo).
    """
    from PassiveTotal_v2 import domain_reputation_command
    # Configure
    args = {
        'domain': 'somedomain.com'
    }
    empty_response = '{"results": []}'
    dummy_response = json.loads(empty_response)
    request_mocker.return_value = dummy_response
    # Execute
    domain_reputation_command(client, args)
    # Assert: with no results the command still builds CommandResults with an
    # empty outputs list and a "No entries" table.
    mock_cr.assert_called_with(
        outputs_prefix='PassiveTotal.Domain',
        outputs_key_field='domain',
        outputs=[],
        readable_output='### Domain(s)\n**No entries.**\n'
    )
| 34.883065
| 123
| 0.719194
| 4,506
| 34,604
| 5.258766
| 0.06569
| 0.050135
| 0.028697
| 0.022282
| 0.854786
| 0.830942
| 0.804693
| 0.785365
| 0.759284
| 0.748945
| 0
| 0.010666
| 0.189862
| 34,604
| 991
| 124
| 34.918264
| 0.834594
| 0.180384
| 0
| 0.548872
| 0
| 0.00188
| 0.240705
| 0.104462
| 0
| 0
| 0
| 0
| 0.140977
| 1
| 0.086466
| false
| 0.18609
| 0.084586
| 0
| 0.174812
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
60fa17d83f774f11a4fbc81833844d377c9d6bc1
| 62,768
|
py
|
Python
|
tests/python/pyomexmeta_tests.py
|
nickerso/libOmexMeta
|
5088f04726c474e5be49166778aee84af4d08ea5
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pyomexmeta_tests.py
|
nickerso/libOmexMeta
|
5088f04726c474e5be49166778aee84af4d08ea5
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pyomexmeta_tests.py
|
nickerso/libOmexMeta
|
5088f04726c474e5be49166778aee84af4d08ea5
|
[
"Apache-2.0"
] | null | null | null |
import libcombine
import os
import requests
import sys
import typing
import unittest
# add the source directory to path so we can import code we are testing
_PYTHON_TESTS_DIR = os.path.dirname(__file__)
_TESTS_DIR = os.path.dirname(_PYTHON_TESTS_DIR)
_PROJECT_ROOT = os.path.dirname(_TESTS_DIR)
_SRC_DIR = os.path.join(_PROJECT_ROOT, "src")
sys.path.append(_SRC_DIR)
# module not found by IDE, but it does exist and the tests do run
from pyomexmeta import *
from test_strings import TestStrings
try:
import tellurium as te
except ImportError:
raise ImportError("package \"tellurium\" not found. Please `pip install tellurium`")
# Antimony source for a small test model: four mass-action reactions over
# species A-D (plus scaling factor S), split across 'nucleus' and 'cytosol'
# compartments. Compiled to SBML below and shared by all tests in this module.
antimony = """
model TestModel
r1: A -> B; S*k1*A;
r2: B -> A; k2*B;
r3: C -> D; k3*B*C;
r4: D -> A; k4*D;
A = 100;
B = 0;
C = 100;
D = 0;
S = 1;
k1 = 0.1;
k2 = 0.1;
k3 = 0.1;
k4 = 0.1;
compartment nucleus = 1;
compartment cytosol = 1;
A in nucleus;
B in nucleus;
C in cytosol;
D in cytosol;
end
"""

# SBML string generated once at import time via tellurium.
SBML = te.loada(antimony).getSBML()
class TestRDF(unittest.TestCase):
    """Tests for pyomexmeta's RDF class: construction, parsing from
    string/file/uri, default URI accessors, SPARQL querying and the sqlite
    storage backend.

    Fix: renamed test_crete_new_rdf_obj -> test_create_new_rdf_obj (typo);
    unittest discovers tests by the ``test`` prefix, so nothing else changes.
    """

    # RDF/XML fixture containing exactly six triples; most tests parse this.
    rdf_str = """<?xml version="1.1" encoding="utf-8"?>
<rdf:RDF xmlns:bqbiol="http://biomodels.net/biology-qualifiers/"
xmlns:bqmodel="http://biomodels.net/model-qualifiers/"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:semsim="http://www.bhi.washington.edu/semsim#">
<rdf:Description rdf:about="http://omex-library.org/NewOmex.omex/NewModel.rdf#force_0">
<semsim:hasSinkParticipant rdf:resource="http://omex-library.org/NewOmex.omex/NewModel.rdf#sink_0"/>
<semsim:hasSourceParticipant rdf:resource="http://omex-library.org/NewOmex.omex/NewModel.rdf#source_0"/>
</rdf:Description>
<rdf:Description rdf:about="http://omex-library.org/NewOmex.omex/NewModel.rdf#parameter_metaid_0">
<bqbiol:isPropertyOf rdf:resource="http://omex-library.org/NewOmex.omex/NewModel.rdf#force_0"/>
<bqbiol:isVersionOf rdf:resource="https://identifiers.org/opb/OPB_01058"/>
</rdf:Description>
<rdf:Description rdf:about="http://omex-library.org/NewOmex.omex/NewModel.rdf#sink_0">
<semsim:hasPhysicalEntityReference rdf:resource="http://omex-library.org/NewOmex.omex/NewModel.rdf#species_metaid_1"/>
</rdf:Description>
<rdf:Description rdf:about="http://omex-library.org/NewOmex.omex/NewModel.rdf#source_0">
<semsim:hasPhysicalEntityReference rdf:resource="http://omex-library.org/NewOmex.omex/NewModel.rdf#species_metaid_0"/>
</rdf:Description>
</rdf:RDF>"""

    # Remote SBML model used by the (currently skipped) *_from_uri tests.
    sbml_uri = "https://www.ebi.ac.uk/biomodels/model/download/BIOMD0000000064.2?filename=BIOMD0000000064_url.xml"
    # Scratch files created in setUp / removed in tearDown.
    sbml_file = os.path.join(os.getcwd(), "sbml_file_for_tests.sbml")
    sqlite_fname = os.path.join(os.path.dirname(__file__), "sqlite_db_from_python.db")

    def setUp(self) -> None:
        # Persist the RDF fixture so the file-based readers have input.
        with open(self.sbml_file, "w") as f:
            f.write(self.rdf_str)

    def tearDown(self) -> None:
        teardown = True  # flip to False to keep artifacts around for debugging
        if teardown:
            if os.path.isfile(self.sbml_file):
                os.remove(self.sbml_file)
            try:
                if os.path.isfile(self.sqlite_fname):
                    os.remove(self.sqlite_fname)
            except PermissionError:
                # The sqlite handle may still be held (e.g. on Windows); skip cleanup.
                pass

    def test_create_new_rdf_obj(self):
        rdf = RDF()
        # _obj holds the address of the underlying C object.
        self.assertIsInstance(rdf._obj, int)

    def test_from_string(self):
        rdf = RDF.from_string(self.rdf_str, "rdfxml")
        print(rdf)
        self.assertEqual(6, len(rdf))

    def test_from_string_for_docs(self):
        rdf_str = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#>
<https://dublincore.org/specifications/dublin-core/dcmi-terms/creator> <https://orcid.org/1234-1234-1234-1234> .
"""
        rdf = RDF.from_string(rdf_str, format="turtle")
        self.assertEqual(1, len(rdf))

    def test_add_from_string(self):
        rdf = RDF()
        RDF.add_from_string(rdf, self.rdf_str, "rdfxml", "test_add_from_string.rdf")
        self.assertEqual(6, len(rdf))

    @unittest.skip("url broken")
    def test_from_uri(self):
        rdf = RDF.from_uri(self.sbml_uri, "rdfxml")
        self.assertEqual(277, len(rdf))

    @unittest.skip("url broken")
    def test_add_from_uri(self):
        rdf = RDF()
        RDF.add_from_uri(rdf, self.sbml_uri, "rdfxml")
        self.assertEqual(277, len(rdf))

    def test_from_file(self):
        rdf = RDF.from_file(self.sbml_file, "rdfxml")
        self.assertEqual(6, len(rdf))

    def test_add_from_file(self):
        rdf = RDF()
        RDF.add_from_file(rdf, self.sbml_file, "rdfxml")
        self.assertEqual(6, len(rdf))

    def test_set_repository_uri(self):
        rdf = RDF()
        rdf.set_repository_uri("https://my-awesome-repository.org")
        actual = rdf.get_repository_uri()
        # A trailing slash is appended automatically.
        expected = "https://my-awesome-repository.org/"
        self.assertEqual(expected, actual)

    def test_set_archive_uri(self):
        rdf = RDF()
        rdf.set_archive_uri("my-awesome-archive.omex")
        actual = rdf.get_archive_uri()
        # The archive name is rooted under the repository uri.
        expected = "http://omex-library.org/my-awesome-archive.omex/"
        self.assertEqual(expected, actual)

    def test_set_model_uri(self):
        rdf = RDF()
        rdf.set_model_uri("my-awesome-model.xml")
        actual = rdf.get_model_uri()
        # The model name is rooted under the archive uri.
        expected = "http://omex-library.org/NewOmex.omex/my-awesome-model.xml"
        self.assertEqual(expected, actual)

    def test_get_repository_uri(self):
        rdf = RDF()
        actual = rdf.get_repository_uri()
        expected = "http://omex-library.org/"
        self.assertEqual(expected, actual)

    def test_get_archive_uri(self):
        rdf = RDF()
        actual = rdf.get_archive_uri()
        expected = "http://omex-library.org/NewOmex.omex/"
        self.assertEqual(expected, actual)

    def test_get_model_uri(self):
        rdf = RDF()
        actual = rdf.get_model_uri()
        expected = "http://omex-library.org/NewOmex.omex/NewModel.xml"
        self.assertEqual(expected, actual)

    def test_get_local_uri(self):
        rdf = RDF()
        actual = rdf.get_local_uri()
        expected = "http://omex-library.org/NewOmex.omex/NewModel.rdf#"
        self.assertEqual(expected, actual)

    def test_query(self):
        rdf = RDF.from_string(self.rdf_str, "rdfxml")
        # Select every triple in the graph and serialise the result as csv.
        q = """SELECT ?x ?y ?z
WHERE {?x ?y ?z}
"""
        expected = """x,y,z
http://omex-library.org/NewOmex.omex/NewModel.rdf#force_0,http://www.bhi.washington.edu/semsim#hasSinkParticipant,http://omex-library.org/NewOmex.omex/NewModel.rdf#sink_0
http://omex-library.org/NewOmex.omex/NewModel.rdf#force_0,http://www.bhi.washington.edu/semsim#hasSourceParticipant,http://omex-library.org/NewOmex.omex/NewModel.rdf#source_0
http://omex-library.org/NewOmex.omex/NewModel.rdf#parameter_metaid_0,http://biomodels.net/biology-qualifiers/isPropertyOf,http://omex-library.org/NewOmex.omex/NewModel.rdf#force_0
http://omex-library.org/NewOmex.omex/NewModel.rdf#parameter_metaid_0,http://biomodels.net/biology-qualifiers/isVersionOf,https://identifiers.org/opb/OPB_01058
http://omex-library.org/NewOmex.omex/NewModel.rdf#sink_0,http://www.bhi.washington.edu/semsim#hasPhysicalEntityReference,http://omex-library.org/NewOmex.omex/NewModel.rdf#species_metaid_1
http://omex-library.org/NewOmex.omex/NewModel.rdf#source_0,http://www.bhi.washington.edu/semsim#hasPhysicalEntityReference,http://omex-library.org/NewOmex.omex/NewModel.rdf#species_metaid_0
"""
        self.maxDiff = None
        actual = rdf.query(q, "csv")
        self.assertEqual(expected, actual)

    def test_use_sqlite_storage(self):
        # Back the RDF graph with an on-disk sqlite store instead of memory.
        rdf = RDF("sqlite", self.sqlite_fname, "new='yes'")
        rdf.add_from_string(self.rdf_str, format="rdfxml")
        self.assertTrue(os.path.isfile(self.sqlite_fname))
class EditorTests(unittest.TestCase):
maxDiff = None
    def setUp(self) -> None:
        # Fresh graph plus an editor over the module-level SBML string
        # (positional args: generate_new_metaids=True, sbml_semantic_extraction=False).
        self.rdf = RDF()
        self.editor = self.rdf.to_editor(SBML, True, False)
    def test_to_editor(self):
        # RDF.to_editor (called in setUp) must hand back an Editor instance.
        self.assertIsInstance(self.editor, Editor)
    def test_context_manager_single_annotation_with_sbml_extraction(self):
        """Add one singular annotation with sbml_semantic_extraction enabled.

        With extraction on, the editor pre-populates the graph with triples
        derived from the SBML itself (reaction participants, compartment
        membership, process properties), so the expected turtle below contains
        far more than the single annotation added in the context manager.
        """
        editor = self.rdf.to_editor(SBML, generate_new_metaids=True, sbml_semantic_extraction=True)
        with editor.new_singular_annotation() as singular_annotation:
            singular_annotation \
                .about("species0000") \
                .predicate("bqbiol", "is") \
                .resource_uri("uniprot:PD88776")
        expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:MediatorParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:ProcessProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0001
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0002
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0002> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0003
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0003> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SinkParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SinkParticipant0002
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0003> .
local:SinkParticipant0003
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0002
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0002> .
local:SourceParticipant0003
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0003> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001>
semsim:hasSinkParticipant local:SinkParticipant0001 ;
semsim:hasSourceParticipant local:SourceParticipant0001 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0002>
semsim:hasMediatorParticipant local:MediatorParticipant0000 ;
semsim:hasSinkParticipant local:SinkParticipant0002 ;
semsim:hasSourceParticipant local:SourceParticipant0002 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0003>
semsim:hasSinkParticipant local:SinkParticipant0003 ;
semsim:hasSourceParticipant local:SourceParticipant0003 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:PD88776> ;
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#nucleus> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0001>
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#nucleus> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0002>
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#cytosol> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0003>
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#cytosol> ."""
        # Graph-level comparison, robust to triple ordering.
        self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_context_manager_single_annotation_without_sbml_extraction(self):
    """A single bqbiol:is annotation created via the context manager,
    with automatic SBML semantic extraction disabled, yields exactly
    one triple about species0000."""
    editor = self.rdf.to_editor(SBML, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_singular_annotation() as annot:
        # Fluent API also works as individual statements.
        annot.about("#species0000")
        annot.predicate("bqbiol", "is")
        annot.resource_uri("uniprot:PD88776")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:PD88776> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_context_manager_single_annotation_simple(self):
    """Singular annotation on species0000 pointing at a CHEBI term."""
    editor = self.rdf.to_editor(SBML, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_singular_annotation() as annot:
        annot.about("species0000")
        annot.predicate("bqbiol", "is")
        annot.resource_uri("CHEBI:16236")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/CHEBI:16236> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_context_manager_single_annotation_simple2(self):
    """Same annotation as test_context_manager_single_annotation_simple,
    built with the chained (fluent) call style instead."""
    editor = self.rdf.to_editor(SBML, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_singular_annotation() as annot:
        annot.about("species0000") \
            .predicate("bqbiol", "is") \
            .resource_uri("CHEBI:16236")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/CHEBI:16236> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_context_manager_personal_information(self):
    """Personal (creator) information produces dc:creator plus foaf
    triples attached to a PersonalInfo node on the model."""
    editor = self.rdf.to_editor(SBML, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_personal_information() as info:
        info.add_creator("1234-1234-1234-1234")
        info.add_mbox("annotations@uw.edu")
        info.add_name("Ciaran Welsh")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix dc: <https://dublincore.org/specifications/dublin-core/dcmi-terms/> .
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml>
dc:creator <http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000>
foaf:mbox "annotations@uw.edu" ;
foaf:name "Ciaran Welsh" ;
dc:creator <https://identifiers.org/orcid/1234-1234-1234-1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_sbml1(self):
    """Annotate an SBML species as a physical entity whose property is
    built up-front with new_physical_property() and attached via
    has_property(property=...).

    Fix: the local variable was named ``property``, shadowing the Python
    builtin. Renamed to ``entity_property``; the ``property=`` keyword
    argument of has_property() is part of the API and is unchanged.
    """
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    entity_property = editor.new_physical_property()
    entity_property.about("EntityProperty", eUriType.LOCAL_URI) \
        .is_version_of("opb:OPB_12345") \
        .is_property_of("species0001", eUriType.MODEL_URI)
    with editor.new_physical_entity() as physical_entity:
        physical_entity.about("species0001", eUriType.MODEL_URI) \
            .identity("uniprot:PD12345") \
            .is_part_of("fma:1234") \
            .has_property(property=entity_property)
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:EntityProperty
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_12345> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0001>
bqbiol:is <https://identifiers.org/uniprot:PD12345> ;
bqbiol:isPartOf <https://identifiers.org/fma:1234> ."""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_sbml2(self):
    """Physical entity annotation where the property is created inline
    by has_property(about, uri_type, is_version_of)."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_physical_entity() as entity:
        entity.about("species0000", eUriType.MODEL_URI)
        entity.identity("uniprot:PD12345")
        entity.is_part_of("fma:1234")
        entity.has_property("EntityProperty", eUriType.LOCAL_URI, "opb:OPB_12345")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:EntityProperty
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_12345> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:PD12345> ;
bqbiol:isPartOf <https://identifiers.org/fma:1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_sbml3(self):
    """When has_property() is given only an is_version_of term, the
    property id is auto-generated (local:EntityProperty0000)."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_physical_entity() as entity:
        entity.about("species0000", eUriType.MODEL_URI)
        entity.identity("uniprot:PD12345")
        entity.is_part_of("fma:1234")
        entity.has_property("opb:OPB_12345")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:EntityProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_12345> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:PD12345> ;
bqbiol:isPartOf <https://identifiers.org/fma:1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_sbml4(self):
    """A physical entity with no property at all only emits the identity
    and location triples."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_physical_entity() as entity:
        entity.about("species0000", eUriType.MODEL_URI)
        entity.identity("uniprot:PD12345")
        entity.is_part_of("fma:1234")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:PD12345> ;
bqbiol:isPartOf <https://identifiers.org/fma:1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_cellml1(self):
    """CellML variant: the property lives at a MODEL_URI (main.Volume)
    and points at a local entity node.

    Fix: the local variable was named ``property``, shadowing the Python
    builtin. Renamed to ``entity_property``; the ``property=`` keyword
    argument of has_property() is part of the API and is unchanged.
    """
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    entity_property = editor.new_physical_property()
    entity_property.about("main.Volume", eUriType.MODEL_URI) \
        .is_version_of("opb:OPB_00154") \
        .is_property_of("entity0", eUriType.LOCAL_URI)
    with editor.new_physical_entity() as physical_entity:
        physical_entity.about("entity0", eUriType.LOCAL_URI) \
            .identity("fma:9570") \
            .is_part_of("fma:18228") \
            .has_property(property=entity_property)
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:entity0
bqbiol:is <https://identifiers.org/fma:9570> ;
bqbiol:isPartOf <https://identifiers.org/fma:18228> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.Volume>
bqbiol:isPropertyOf local:entity0 ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00154> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_cellml2(self):
    """Same RDF as test_physical_entity_cellml1, but with the property
    created inline by has_property(about, uri_type, is_version_of)."""
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_physical_entity() as entity:
        entity.about("entity0", eUriType.LOCAL_URI)
        entity.identity("fma:9570")
        entity.is_part_of("fma:18228")
        entity.has_property("main.Volume", eUriType.MODEL_URI, "opb:OPB_00154")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:entity0
bqbiol:is <https://identifiers.org/fma:9570> ;
bqbiol:isPartOf <https://identifiers.org/fma:18228> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.Volume>
bqbiol:isPropertyOf local:entity0 ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00154> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_entity_cellml3(self):
    """Omitting about() on the entity lets the library auto-generate the
    local entity id (local:Entity0000)."""
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_physical_entity() as entity:
        entity.identity("fma:9570")
        entity.is_part_of("fma:18228")
        entity.has_property(property_about="main.Volume",
                            about_uri_type=eUriType.MODEL_URI,
                            is_version_of="opb:OPB_00154")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:Entity0000
bqbiol:is <https://identifiers.org/fma:9570> ;
bqbiol:isPartOf <https://identifiers.org/fma:18228> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.Volume>
bqbiol:isPropertyOf local:Entity0000 ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00154> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_process_sbml1(self):
    """Annotate an SBML reaction as a physical process with one source,
    one sink and one mediator participant, and a user-named property
    (local:ReactionProperty) attached inline via has_property()."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_physical_process() as physical_process:
        physical_process.about("reaction0000", eUriType.MODEL_URI) \
            .add_source("species0000", eUriType.MODEL_URI, 1) \
            .add_sink("species0001", eUriType.MODEL_URI, 1) \
            .add_mediator("species0002", eUriType.MODEL_URI) \
            .has_property("ReactionProperty", eUriType.LOCAL_URI, "opb:OPB_00592")
    # Mediators carry no multiplier; sources/sinks carry "1"^^rdf:double.
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:MediatorParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0002> .
local:ReactionProperty
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasMediatorParticipant local:MediatorParticipant0000 ;
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_process_sbml2(self):
    """Same process as test_physical_process_sbml1 but with only
    is_version_of supplied, so the property id is auto-generated
    (local:ProcessProperty0000)."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_physical_process() as physical_process:
        physical_process.about("reaction0000", eUriType.MODEL_URI) \
            .add_source("species0000", eUriType.MODEL_URI, 1) \
            .add_sink("species0001", eUriType.MODEL_URI, 1) \
            .add_mediator("species0002", eUriType.MODEL_URI) \
            .has_property(is_version_of="opb:OPB_00592")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:MediatorParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0002> .
local:ProcessProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasMediatorParticipant local:MediatorParticipant0000 ;
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_process_cellml1(self):
    """CellML physical process: the process node and all participants are
    LOCAL_URIs, while the property (main.ReactionRate) is a MODEL_URI."""
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_physical_process() as physical_process:
        physical_process.about("Process", eUriType.LOCAL_URI) \
            .add_source("entity1", eUriType.LOCAL_URI, 1) \
            .add_sink("entity2", eUriType.LOCAL_URI, 1) \
            .add_mediator("entity3", eUriType.LOCAL_URI) \
            .has_property("main.ReactionRate", eUriType.MODEL_URI, "opb:OPB_00592")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:MediatorParticipant0000
semsim:hasPhysicalEntityReference local:entity3 .
local:Process
semsim:hasMediatorParticipant local:MediatorParticipant0000 ;
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference local:entity2 .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference local:entity1 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.ReactionRate>
bqbiol:isPropertyOf local:Process ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_physical_process_cellml2(self):
    """As test_physical_process_cellml1 but without about(): the process
    id is auto-generated (local:Process0000)."""
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_physical_process() as physical_process:
        physical_process \
            .add_source("entity1", eUriType.LOCAL_URI, 1) \
            .add_sink("entity2", eUriType.LOCAL_URI, 1) \
            .add_mediator("entity3", eUriType.LOCAL_URI) \
            .has_property(property_about="main.ReactionRate", about_uri_type=eUriType.MODEL_URI,
                          is_version_of="opb:OPB_00592")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:MediatorParticipant0000
semsim:hasPhysicalEntityReference local:entity3 .
local:Process0000
semsim:hasMediatorParticipant local:MediatorParticipant0000 ;
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference local:entity2 .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference local:entity1 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.ReactionRate>
bqbiol:isPropertyOf local:Process0000 ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_energy_diff_sbml1(self):
    """Energy differential on an SBML reaction: source/sink participants
    (no multipliers) plus a user-named property (local:localParameter0000)."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_energy_diff() as energy_diff:
        energy_diff.about("reaction0000", eUriType.MODEL_URI) \
            .add_source("species0000", eUriType.MODEL_URI) \
            .add_sink("species0001", eUriType.MODEL_URI) \
            .has_property("localParameter0000", eUriType.LOCAL_URI, "opb:OPB_01058")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:SinkParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:localParameter0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_01058> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_energy_diff_sbml2(self):
    """Energy differential with an auto-generated property id
    (local:EnergyDiffProperty0000) when only is_version_of is given."""
    editor = self.rdf.to_editor(TestStrings.sbml, True, False)
    with editor.new_energy_diff() as energy_diff:
        energy_diff.about("reaction0001", eUriType.MODEL_URI) \
            .add_source("species0001", eUriType.MODEL_URI) \
            .add_sink("species0000", eUriType.MODEL_URI) \
            .has_property(is_version_of="opb:OPB_01058")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:EnergyDiffProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_01058> .
local:SinkParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0000
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_energy_diff_sbml3(self):
    """Energy differential on a hand-written Nernst-potential SBML model,
    with custom archive/model URIs set on a fresh RDF graph.

    NOTE(review): this test contains no assertion — it only prints the
    resulting graph, so it can only fail by raising. An expected-RDF
    comparison (as in the sibling energy_diff tests) should be added.
    """
    sbml = """<sbml xmlns="http://www.sbml.org/sbml/level3/version1/core" level="3" version="1">
<model metaid="NernstExample" id="NernstExample">
<listOfCompartments>
<compartment id="cytoplasm" metaid="cytoplasm" spatialDimensions="3" size="1" constant="true"/>
<compartment id="extracellular" metaid="extracellular" spatialDimensions="3" size="1" constant="true"/>
</listOfCompartments>
<listOfSpecies>
<species id="Ca_ex" metaid="Ca_ex" compartment="extracellular" initialConcentration="2" hasOnlySubstanceUnits="false" boundaryCondition="false" constant="false"/>
<species id="Ca_cyt" metaid="Ca_cyt" compartment="cytoplasm" initialConcentration="0.07" hasOnlySubstanceUnits="false" boundaryCondition="false" constant="false"/>
</listOfSpecies>
<listOfParameters>
<parameter id="NP" metaid="NernstPotential" value="137.04" constant="true"/>
</listOfParameters>
</model>
</sbml>"""
    rdf_graph = RDF()
    rdf_graph.set_archive_uri("Example.omex")
    rdf_graph.set_model_uri("Example.sbml")
    editor = rdf_graph.to_editor(sbml, generate_new_metaids=False, sbml_semantic_extraction=False)
    # Ca_cyt: Calcium Ions cytosol
    # Ca_ex: Calcium Ions extracellular space
    # NernstReversalPotential_in: The metaID of the SBML reaction
    # OPB/OPB_01581: Nernst reversal potential
    with editor.new_energy_diff() as energy_in:
        energy_in \
            .about("EnergyDiff000", eUriType.LOCAL_URI) \
            .add_source(physical_entity_reference="Ca_ex", uri_type=eUriType.MODEL_URI) \
            .add_sink(physical_entity_reference="Ca_cyt", uri_type=eUriType.MODEL_URI) \
            .has_property(property_about="NernstPotential", about_uri_type=eUriType.MODEL_URI,
                          is_version_of="OPB:OPB_01581")
    print(rdf_graph)
def test_energy_diff_cellml1(self):
    """CellML energy differential: MODEL_URI process node, LOCAL_URI
    participants, MODEL_URI property.

    NOTE(review): this test is byte-identical to test_energy_diff_cellml2;
    one of the two is presumably meant to exercise a different call form.
    """
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_energy_diff() as energy_diff:
        energy_diff.about("main.MembraneVoltage", eUriType.MODEL_URI) \
            .add_source("entity1", eUriType.LOCAL_URI) \
            .add_sink("entity2", eUriType.LOCAL_URI) \
            .has_property("EnergyDiffProperty", eUriType.MODEL_URI, "opb:OPB_00592")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:SinkParticipant0000
semsim:hasPhysicalEntityReference local:entity2 .
local:SourceParticipant0000
semsim:hasPhysicalEntityReference local:entity1 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#EnergyDiffProperty>
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#main.MembraneVoltage> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.MembraneVoltage>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
def test_energy_diff_cellml2(self):
    """CellML energy differential: MODEL_URI process node, LOCAL_URI
    participants, MODEL_URI property.

    NOTE(review): this test is byte-identical to test_energy_diff_cellml1;
    one of the two is presumably meant to exercise a different call form.
    """
    editor = self.rdf.to_editor(TestStrings.cellml, True, False)
    with editor.new_energy_diff() as energy_diff:
        energy_diff.about("main.MembraneVoltage", eUriType.MODEL_URI) \
            .add_source("entity1", eUriType.LOCAL_URI) \
            .add_sink("entity2", eUriType.LOCAL_URI) \
            .has_property("EnergyDiffProperty", eUriType.MODEL_URI, "opb:OPB_00592")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:SinkParticipant0000
semsim:hasPhysicalEntityReference local:entity2 .
local:SourceParticipant0000
semsim:hasPhysicalEntityReference local:entity1 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#EnergyDiffProperty>
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#main.MembraneVoltage> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#main.MembraneVoltage>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(self.rdf, expected))
class AnnotateAModelTest(unittest.TestCase):
maxDiff = None
def setUp(self) -> None:
    """Build the SMAD nuclear-transport SBML model used by every test
    in this class from an antimony description."""
    antimony_source = """
model SmadNuclearTransport
compartment cytosol;
compartment nucleus;
Smad3Cyt in cytosol;
Smad3Nuc in nucleus;
k1 = 0.1;
k2 = 1;
Smad3Nuc = 10;
Smad3Cyt = 10;
r1: Smad3Nuc => Smad3Cyt; k1*Smad3Nuc;
r2: Smad3Cyt => Smad3Nuc; k2*Smad3Cyt;
end
"""
    self.sbml = te.antimonyToSBML(antimony_source)
def test_get_metaids(self):
    """get_metaids() returns the model's metaids, including the ones
    generated by generate_new_metaids=True, in document order."""
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True)
    expected = [
        'SmadNuclearTransport',
        'compartment0000',
        'compartment0001',
        'species0000',
        'species0001',
        'parameter0000',
        'parameter0001',
        'reaction0000',
        'kineticLaw0000',
        'reaction0001',
        'kineticLaw0001',
    ]
    self.assertEqual(expected, editor.get_metaids())
def test_get_xml(self):
    """get_xml() returns the SBML document with the newly generated
    metaids written into it.

    Fix: the final check was ``self.assertTrue(expected, actual)``, which
    only tests the truthiness of ``expected`` (a non-empty string, so it
    always passed) and used ``actual`` as the failure message. Replaced
    with ``assertEqual`` and removed the leftover debug ``print``.

    NOTE(review): the expected XML mixes metaid styles
    ("#species0000" on a compartment vs "#OmexMetaId000N" elsewhere) and
    may be stale relative to current metaid generation — verify against
    the library's actual output if this assertion now fails.
    """
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True)
    xml_with_metaids = editor.get_xml()
    expected = """<?xml version="1.1" encoding="UTF-8"?>
<!-- Created by libAntimony version v2.12.0.3 with libSBML version 5.18.1. -->
<sbml xmlns="http://www.sbml.org/sbml/level3/version1/core" level="3" version="1">
<model metaid="SmadNuclearTransport" id="SmadNuclearTransport">
<listOfCompartments>
<compartment id="cytosol" spatialDimensions="3" constant="true" metaid="#species0000"/>
<compartment id="nucleus" spatialDimensions="3" constant="true" metaid="#OmexMetaId0001"/>
</listOfCompartments>
<listOfSpecies>
<species id="Smad3Cyt" compartment="cytosol" initialConcentration="10" hasOnlySubstanceUnits="false" boundaryCondition="false" constant="false" metaid="#OmexMetaId0002"/>
<species id="Smad3Nuc" compartment="nucleus" initialConcentration="10" hasOnlySubstanceUnits="false" boundaryCondition="false" constant="false" metaid="#OmexMetaId0003"/>
</listOfSpecies>
<listOfParameters>
<parameter id="k1" value="0.1" constant="true"/>
<parameter id="k2" value="1" constant="true"/>
</listOfParameters>
<listOfReactions>
<reaction id="r1" reversible="false" fast="false" metaid="#OmexMetaId0004">
<listOfReactants>
<speciesReference species="Smad3Nuc" stoichiometry="1" constant="true"/>
</listOfReactants>
<listOfProducts>
<speciesReference species="Smad3Cyt" stoichiometry="1" constant="true"/>
</listOfProducts>
<kineticLaw metaid="#OmexMetaId0005">
<math xmlns="http://www.w3.org/1998/Math/MathML">
<apply>
<times/>
<ci> k1 </ci>
<ci> Smad3Nuc </ci>
</apply>
</math>
</kineticLaw>
</reaction>
<reaction id="r2" reversible="false" fast="false" metaid="#OmexMetaId0006">
<listOfReactants>
<speciesReference species="Smad3Cyt" stoichiometry="1" constant="true"/>
</listOfReactants>
<listOfProducts>
<speciesReference species="Smad3Nuc" stoichiometry="1" constant="true"/>
</listOfProducts>
<kineticLaw metaid="#OmexMetaId0007">
<math xmlns="http://www.w3.org/1998/Math/MathML">
<apply>
<times/>
<ci> k2 </ci>
<ci> Smad3Cyt </ci>
</apply>
</math>
</kineticLaw>
</reaction>
</listOfReactions>
</model>
</sbml>
"""
    self.assertEqual(expected, xml_with_metaids)
def test_annotate_model(self):
    """End-to-end annotation of the SMAD model from setUp: a model-level
    literal annotation, two physical entities (the cytosolic and nuclear
    Smad3 species) and two physical processes (the transport reactions).

    The expected RDF also contains participants/properties produced by
    the default SBML semantic extraction (to_editor is called without
    sbml_semantic_extraction=False), which is why each reaction carries
    two source/sink participants and an extra opb:OPB_00592 property.

    Note: autogenerate the participant ID, currently users
    are asked to give the id, but this isn't really necessary.

    Returns:
    """
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True)
    # model level annotations
    with editor.new_singular_annotation() as author:
        author.about("SmadNuclearTransport") \
            .predicate_from_uri("https://unknownpredicate.com/changeme#author") \
            .resource_literal("Ciaran Welsh")
    # annotate Smad3nuc
    with editor.new_physical_entity() as smad3nuc:
        smad3nuc \
            .about("species0000", eUriType.MODEL_URI) \
            .has_property(is_version_of="OPB:OPB_00340") \
            .identity("uniprot:P84022") \
            .is_part_of("obo/FMA_7163") \
            .is_part_of("obo/FMA_264020")
    # annotate Smad3nuc
    with editor.new_physical_entity() as smad3nuc:
        smad3nuc \
            .about("species0001", eUriType.MODEL_URI) \
            .has_property(is_version_of="OPB:OPB_00340") \
            .identity("uniprot:P84022") \
            .is_part_of("obo/FMA_7163") \
            .is_part_of("obo/FMA_63877") \
            .is_part_of("obo/FMA_63840")
    # annotate r1 (Smad3Nuc -> Smad3Cyt)
    with editor.new_physical_process() as export_reaction:
        export_reaction \
            .about("reaction0000", eUriType.MODEL_URI) \
            .has_property(is_version_of="OPB:OPB_00237") \
            .add_source("species0000", eUriType.MODEL_URI, 1) \
            .add_sink("species0001", eUriType.MODEL_URI, 1)
    # annotate r2 (Smad3Cyt -> Smad3Nuc)
    with editor.new_physical_process() as export_reaction:
        export_reaction \
            .about("reaction0001", eUriType.MODEL_URI) \
            .has_property(is_version_of="OPB:OPB_00237") \
            .add_source("species0001", eUriType.MODEL_URI, 1) \
            .add_sink("species0000", eUriType.MODEL_URI, 1)
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:EntityProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> ;
bqbiol:isVersionOf <https://identifiers.org/OPB:OPB_00340> .
local:EntityProperty0001
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> ;
bqbiol:isVersionOf <https://identifiers.org/OPB:OPB_00340> .
local:ProcessProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0001
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0002
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/OPB:OPB_00237> .
local:ProcessProperty0003
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001> ;
bqbiol:isVersionOf <https://identifiers.org/OPB:OPB_00237> .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SinkParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SinkParticipant0002
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SinkParticipant0003
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0002
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SourceParticipant0003
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#SmadNuclearTransport>
<https://unknownpredicate.com/changeme#author> "Ciaran Welsh" .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasSinkParticipant local:SinkParticipant0000, local:SinkParticipant0002 ;
semsim:hasSourceParticipant local:SourceParticipant0000, local:SourceParticipant0002 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001>
semsim:hasSinkParticipant local:SinkParticipant0001, local:SinkParticipant0003 ;
semsim:hasSourceParticipant local:SourceParticipant0001, local:SourceParticipant0003 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:is <https://identifiers.org/uniprot:P84022> ;
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#cytosol>, <https://identifiers.org/obo/FMA_264020>, <https://identifiers.org/obo/FMA_7163> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0001>
bqbiol:is <https://identifiers.org/uniprot:P84022> ;
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#nucleus>, <https://identifiers.org/obo/FMA_63840>, <https://identifiers.org/obo/FMA_63877>, <https://identifiers.org/obo/FMA_7163> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(rdf, expected))
def test_to_editor_with_sbml_extraction(self):
    """With sbml_semantic_extraction=True, to_editor auto-extracts reaction
    participants and compartment membership from the SBML, so the expected
    graph contains semsim/bqbiol triples in addition to the one singular
    annotation added below.
    """
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True, sbml_semantic_extraction=True)
    # model level annotations
    with editor.new_singular_annotation() as author:
        author.about("SmadNuclearTransport") \
            .predicate_from_uri("https://unknownpredicate.com/changeme#author") \
            .resource_literal("Ciaran Welsh")
    # Expected turtle: extracted participants/properties plus the author triple.
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix bqbiol: <http://biomodels.net/biology-qualifiers/> .
@prefix semsim: <http://bime.uw.edu/semsim/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
local:ProcessProperty0000
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:ProcessProperty0001
bqbiol:isPropertyOf <http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001> ;
bqbiol:isVersionOf <https://identifiers.org/opb:OPB_00592> .
local:SinkParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
local:SinkParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0000
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0001> .
local:SourceParticipant0001
semsim:hasMultiplier "1"^^rdf:double ;
semsim:hasPhysicalEntityReference <http://omex-library.org/NewOmex.omex/NewModel.xml#species0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#SmadNuclearTransport>
<https://unknownpredicate.com/changeme#author> "Ciaran Welsh" .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0000>
semsim:hasSinkParticipant local:SinkParticipant0000 ;
semsim:hasSourceParticipant local:SourceParticipant0000 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#reaction0001>
semsim:hasSinkParticipant local:SinkParticipant0001 ;
semsim:hasSourceParticipant local:SourceParticipant0001 .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0000>
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#cytosol> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#species0001>
bqbiol:isPartOf <http://omex-library.org/NewOmex.omex/NewModel.xml#nucleus> ."""
    self.assertTrue(RDF.equals_rdf_vs_string(rdf, expected))
def test_to_editor_without_sbml_extraction(self):
    """With sbml_semantic_extraction=False, only the explicitly added
    singular annotation ends up in the graph — no auto-extracted triples.
    """
    rdf = RDF()
    sbml_editor = rdf.to_editor(self.sbml, generate_new_metaids=True, sbml_semantic_extraction=False)
    # model level annotations
    with sbml_editor.new_singular_annotation() as annot:
        annot.about("SmadNuclearTransport") \
            .predicate_from_uri("https://unknownpredicate.com/changeme#author") \
            .resource_literal("Ciaran Welsh")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#SmadNuclearTransport>
<https://unknownpredicate.com/changeme#author> "Ciaran Welsh" .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(rdf, expected))
def test_personal_information(self):
    """Build a foaf/dc personal-information block via the fluent
    new_personal_information API and check the resulting graph.
    """
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_personal_information() as personal_information:
        personal_information.add_creator("1234-1234-1234-1234") \
            .add_name("Ciaran") \
            .add_mbox("cwelsh2@uw.edu") \
            .add_account_name("1234-1234-1234-1234") \
            .add_account_service_homepage("https://github.com/sys-bio/libomexmeta")
    # Note: the ORCID appears twice — once as foaf:accountName (orcid.org)
    # and once as dc:creator (identifiers.org).
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix dc: <https://dublincore.org/specifications/dublin-core/dcmi-terms/> .
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml>
dc:creator <http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000>
foaf:accountName <https://orcid.org/1234-1234-1234-1234> ;
foaf:accountServiceHomepage <https://github.com/sys-bio/libomexmeta> ;
foaf:mbox "cwelsh2@uw.edu" ;
foaf:name "Ciaran" ;
dc:creator <https://identifiers.org/orcid/1234-1234-1234-1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(rdf, expected))
def test_model_level_annotation(self):
    """NOTE(review): this test is a byte-for-byte duplicate of
    test_personal_information and never exercises a model-level annotation
    API despite its name — confirm intent and replace with a real
    model-level annotation test or delete.
    """
    rdf = RDF()
    editor = rdf.to_editor(self.sbml, generate_new_metaids=True, sbml_semantic_extraction=False)
    with editor.new_personal_information() as personal_information:
        personal_information.add_creator("1234-1234-1234-1234") \
            .add_name("Ciaran") \
            .add_mbox("cwelsh2@uw.edu") \
            .add_account_name("1234-1234-1234-1234") \
            .add_account_service_homepage("https://github.com/sys-bio/libomexmeta")
    expected = """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix dc: <https://dublincore.org/specifications/dublin-core/dcmi-terms/> .
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix OMEXlib: <http://omex-library.org/> .
@prefix myOMEX: <http://omex-library.org/NewOmex.omex/> .
@prefix local: <http://omex-library.org/NewOmex.omex/NewModel.rdf#> .
<http://omex-library.org/NewOmex.omex/NewModel.xml>
dc:creator <http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000> .
<http://omex-library.org/NewOmex.omex/NewModel.xml#PersonalInfo0000>
foaf:accountName <https://orcid.org/1234-1234-1234-1234> ;
foaf:accountServiceHomepage <https://github.com/sys-bio/libomexmeta> ;
foaf:mbox "cwelsh2@uw.edu" ;
foaf:name "Ciaran" ;
dc:creator <https://identifiers.org/orcid/1234-1234-1234-1234> .
"""
    self.assertTrue(RDF.equals_rdf_vs_string(rdf, expected))
class GoldStandardOmexArchiveTests(unittest.TestCase):
    """Integration tests against the "gold standard" OMEX archives hosted on
    figshare: download each archive, extract its RDF annotation file with
    libcombine, parse it with libomexmeta and check the triple count.

    NOTE(review): these tests hit the network; they will fail offline.
    """
    maxDiff = None
    # urls and filepaths for the gold standard omex archives
    gold_standard_url1 = "https://auckland.figshare.com/ndownloader/files/17432333"
    gold_standard_url2 = "https://auckland.figshare.com/ndownloader/files/15425522"
    gold_standard_url3 = "https://auckland.figshare.com/ndownloader/files/15425513"
    gold_standard_url4 = "https://auckland.figshare.com/ndownloader/files/15425546"
    gold_standard_url5 = "https://auckland.figshare.com/ndownloader/files/17432366"
    gold_standard_filename1 = os.path.join(os.getcwd(), "goldstandard1.omex")
    gold_standard_filename2 = os.path.join(os.getcwd(), "goldstandard2.omex")
    gold_standard_filename3 = os.path.join(os.getcwd(), "goldstandard3.omex")
    gold_standard_filename4 = os.path.join(os.getcwd(), "goldstandard4.omex")
    gold_standard_filename5 = os.path.join(os.getcwd(), "goldstandard5.omex")

    def setUp(self) -> None:
        pass

    def download_file(self, url: str, local_fname: str) -> str:
        """Stream `url` to disk at `local_fname` (8 KiB chunks) and return the path."""
        with requests.get(url, stream=True) as r:
            r.raise_for_status()
            with open(local_fname, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        return local_fname

    def tearDown(self) -> None:
        # Delete any downloaded archives; flip `teardown` to False locally
        # to keep them for debugging.
        teardown = True
        if teardown:
            for i in [
                    self.gold_standard_filename1,
                    self.gold_standard_filename2,
                    self.gold_standard_filename3,
                    self.gold_standard_filename4,
                    self.gold_standard_filename5]:
                if os.path.isfile(i):
                    os.remove(i)

    def extract_rdf_from_combine_archive(self, archive_path: str) -> typing.List[str]:
        """Return the content of every ".rdf" entry in the archive as strings.

        Raises FileNotFoundError when `archive_path` does not exist.
        """
        if not os.path.isfile(archive_path):
            raise FileNotFoundError(archive_path)
        # read the archive using libcombine
        archive = libcombine.CombineArchive()
        # note the skipOmex flag. This is needed to expose any files with an "rdf" extension.
        archive.initializeFromArchive(archive_path, skipOmex=True)  # new in libcombine!
        # filter through the entries in the omex archive for rdf extension files
        annotation_entries = [i.c_str() for i in archive.getAllLocations() if i[-4:] == ".rdf"]
        # read the rdf into a python string
        return [archive.extractEntryToString(i) for i in annotation_entries]

    def gold_standard_test(self, gold_standard_url: str, gold_standard_filename: str, size: int):
        """Download one archive and assert its RDF graph contains `size` triples."""
        # get the gold standard omex file from the internet
        self.download_file(gold_standard_url, gold_standard_filename)
        # get rdf string from omex file using libcombine
        rdf_strings = self.extract_rdf_from_combine_archive(gold_standard_filename)
        assert (len(rdf_strings) == 1), len(rdf_strings)
        # now libomexmeta can read the string into an rdf graph
        rdf = RDF.from_string(rdf_strings[0])
        # len(rdf) is the number of triples parsed into the graph
        self.assertEqual(size, len(rdf))

    def test_gold_standard1(self):
        self.gold_standard_test(self.gold_standard_url1, self.gold_standard_filename1, 23)

    def test_gold_standard2(self):
        self.gold_standard_test(self.gold_standard_url2, self.gold_standard_filename2, 429)

    def test_gold_standard3(self):
        self.gold_standard_test(self.gold_standard_url3, self.gold_standard_filename3, 546)

    def test_gold_standard4(self):
        self.gold_standard_test(self.gold_standard_url4, self.gold_standard_filename4, 629)

    def test_gold_standard5(self):
        self.gold_standard_test(self.gold_standard_url5, self.gold_standard_filename5, 69)

    def test_query(self):
        """A catch-all SPARQL SELECT over gold standard 1 yields 234 result triples."""
        self.download_file(self.gold_standard_url1, self.gold_standard_filename1)
        s = self.extract_rdf_from_combine_archive(self.gold_standard_filename1)[0]
        rdf = RDF.from_string(s, "rdfxml")
        query_str = """
PREFIX bqbiol: <http://biomodels.net/biology-qualifiers/>
SELECT ?x ?y ?z
WHERE {
?x ?y ?z
}"""
        results = rdf.query(query_str, "rdfxml")
        results_rdf = RDF()
        results_rdf.add_from_string(results)
        self.assertEqual(234, len(results_rdf))
class DrawTests(unittest.TestCase):
    """Tests for RDF.draw (graphviz rendering of an annotation graph)."""

    def setUp(self) -> None:
        # Two-species toy model used only as an annotation target.
        # Fixed from the original fixture: the second reaction was also
        # named `r1` (duplicate reaction ids are an antimony error) and
        # `B in cytosol` was missing its terminating semicolon.
        ant = """
        model SBML1
            compartment cytosol = 1.0;
            A in cytosol;
            B in cytosol;
            A = 10;
            B = 0;
            k1 = 0.1;
            k2 = 0.1;
            r1: A => B; k1*A
            r2: B => A; k2*B
        end
        """
        self.sbml = te.antimonyToSBML(ant)
        self.output_filename = os.path.join(os.path.dirname(__file__), "test_draw")

    def tearDown(self) -> None:
        # rdf.draw may emit either "<name>.jpeg" or the bare name; remove both.
        if os.path.isfile(self.output_filename + ".jpeg"):
            os.remove(self.output_filename + ".jpeg")
        if os.path.isfile(self.output_filename):
            os.remove(self.output_filename)

    @unittest.skip("graphviz executable not found on some systems (on CI). ")
    def test(self):
        """Annotate one species and render the graph to a jpeg file."""
        rdf = RDF()
        with rdf.to_editor(self.sbml, generate_new_metaids=True) as editor:
            with editor.new_singular_annotation() as s:
                s.about("species0000") \
                    .predicate("bqbiol", "is") \
                    .resource_uri("fma/FMA_66835")
        rdf.draw(self.output_filename, format="jpeg")
        self.assertTrue(os.path.isfile(self.output_filename))
class ErrorTests(unittest.TestCase):
    """Check that misusing the annotation API surfaces as OmexMetaException."""

    def test_make_a_mistake(self):
        from pyomexmeta import OmexMetaException
        rdf = RDF()
        editor = rdf.to_editor(TestStrings.sbml, False, False)
        # A singular annotation with only `about` set is incomplete and
        # must raise when the context manager finalises it.
        with self.assertRaises(OmexMetaException):
            with editor.new_singular_annotation() as annotation:
                annotation.about("something")
# Allow this test module to be executed directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| 45.059584
| 203
| 0.690288
| 7,430
| 62,768
| 5.699731
| 0.072275
| 0.03967
| 0.074382
| 0.089258
| 0.83865
| 0.807575
| 0.772675
| 0.750667
| 0.729415
| 0.708683
| 0
| 0.043452
| 0.171361
| 62,768
| 1,392
| 204
| 45.091954
| 0.770764
| 0.018258
| 0
| 0.650442
| 0
| 0.137168
| 0.610843
| 0.098244
| 0
| 0
| 0
| 0
| 0.044248
| 1
| 0.057522
| false
| 0.00177
| 0.010619
| 0
| 0.090265
| 0.002655
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60fbfb9de307ca053cbae7451393247a61218423
| 146
|
py
|
Python
|
common/aist_common/grammar/equivalence_class/invalid.py
|
sfahad1414/AGENT
|
84069edc96b6190bb03ffd5099cbc8966061a563
|
[
"Apache-2.0"
] | 15
|
2020-05-06T16:17:56.000Z
|
2022-03-30T12:25:16.000Z
|
common/aist_common/grammar/equivalence_class/invalid.py
|
dionny/AGENT
|
8a833406b590e23623fcc67db99f6f964d002396
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:17:16.000Z
|
2022-02-10T06:35:58.000Z
|
common/aist_common/grammar/equivalence_class/invalid.py
|
dionny/AGENT
|
8a833406b590e23623fcc67db99f6f964d002396
|
[
"Apache-2.0"
] | 7
|
2020-04-07T18:47:55.000Z
|
2022-03-30T12:14:58.000Z
|
class Invalid:
    """Equivalence-class marker for invalid input values.

    Stringifies to "INVALID" so it can be dropped straight into grammar output.
    """

    _LABEL = "INVALID"

    def __init__(self):
        # Kept as a public instance attribute for backwards compatibility.
        self.equivalence_class = Invalid._LABEL

    def __str__(self):
        return self.equivalence_class
| 16.222222
| 42
| 0.657534
| 16
| 146
| 5.375
| 0.5
| 0.27907
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.260274
| 146
| 8
| 43
| 18.25
| 0.796296
| 0
| 0
| 0
| 0
| 0
| 0.048611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
60ffbb95d891a7d42e3938386b182c5737df50c5
| 65,442
|
py
|
Python
|
tests/data/settings_files.py
|
wingify/vwo-python-sdk
|
8b8e798a16c43012ca2c6c6c85dde66f4f3cb6a5
|
[
"Apache-2.0"
] | 14
|
2019-08-06T06:57:46.000Z
|
2022-01-05T13:27:50.000Z
|
tests/data/settings_files.py
|
wingify/vwo-python-sdk
|
8b8e798a16c43012ca2c6c6c85dde66f4f3cb6a5
|
[
"Apache-2.0"
] | 3
|
2019-08-19T10:29:17.000Z
|
2021-09-16T15:59:38.000Z
|
tests/data/settings_files.py
|
wingify/vwo-python-sdk
|
8b8e798a16c43012ca2c6c6c85dde66f4f3cb6a5
|
[
"Apache-2.0"
] | 10
|
2019-08-08T12:38:50.000Z
|
2021-09-14T11:35:00.000Z
|
# Copyright 2019-2021 Wingify Software Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Various settings_file for testings
Notes:
Abbreviations: T = percentTraffic
W = weight split
AB = VISUAL_AB
FT = FEATURE_TEST
FR = FEATURE_ROLLOUT
IFEF = isFeatureEnabled is False
WS = With Segments
WW = With Whitelisting
Campaigns key of each campaign is the same as the settings_file name.
"""
SETTINGS_FILES = {
"EMPTY_SETTINGS_FILE": {},
"AB_T_50_W_50_50": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 213, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 50},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 50},
],
"id": 230,
"name": "Campaign-230",
"percentTraffic": 50,
"key": "AB_T_50_W_50_50",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"AB_T_100_W_50_50": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [
{"identifier": "abcd", "id": 1, "type": "REVENUE_TRACKING"},
{"identifier": "CUSTOM", "id": 214, "type": "CUSTOM_GOAL"},
],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 50},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 50},
],
"id": 231,
"name": "Campaign-231",
"percentTraffic": 100,
"key": "AB_T_100_W_50_50",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"AB_T_100_W_20_80": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 215, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 20},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 80},
],
"id": 232,
"name": "Campaign-232",
"percentTraffic": 100,
"key": "AB_T_100_W_20_80",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"AB_T_20_W_10_90": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 216, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 10},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 90},
],
"id": 233,
"name": "Campaign-233",
"percentTraffic": 20,
"key": "AB_T_20_W_10_90",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"AB_T_100_W_0_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 217, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 0},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 100},
],
"id": 234,
"name": "Campaign-234",
"percentTraffic": 100,
"key": "AB_T_100_W_0_100",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"AB_T_100_W_33_33_33": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 218, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 33.3333},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 33.3333},
{"id": 3, "name": "Variation-2", "changes": {}, "weight": 33.3333},
],
"id": 235,
"name": "Campaign-235",
"percentTraffic": 100,
"key": "AB_T_100_W_33_33_33",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"DUMMY_SETTINGS_FILE": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "GOAL_NEW", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": "1", "name": "Control", "weight": 40},
{"id": "2", "name": "Variation-1", "weight": 60},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 50,
"key": "DUMMY_SETTINGS_FILE",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"FR_T_0_W_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 213, "type": "CUSTOM_GOAL"}],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "this_is_a_string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 123.456},
{"id": 4, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True},
{
"id": 5,
"key": "JSON_VARIABLE",
"type": "json",
"value": {
"data_string": "this_is_a_string",
"data_integer": "123",
"data_boolean": True,
"data_double": 123.456,
"data_json": {"json": "json"},
},
},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 0,
"key": "FR_T_0_W_100",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FR_T_25_W_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "this_is_a_string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 123.456},
{"id": 4, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True},
{
"id": 5,
"key": "JSON_VARIABLE",
"type": "json",
"value": {
"data_string": "this_is_a_string",
"data_integer": "123",
"data_boolean": True,
"data_double": 123.456,
"data_json": {"json": "json"},
},
},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 25,
"key": "FR_T_25_W_100",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FR_T_50_W_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "this_is_a_string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 123.456},
{"id": 4, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True},
{
"id": 5,
"key": "JSON_VARIABLE",
"type": "json",
"value": {
"data_string": "this_is_a_string",
"data_integer": "123",
"data_boolean": True,
"data_double": 123.456,
"data_json": {"json": "json"},
},
},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 50,
"key": "FR_T_50_W_100",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FR_T_75_W_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "this_is_a_string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 123.456},
{"id": 4, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True},
{
"id": 5,
"key": "JSON_VARIABLE",
"type": "json",
"value": {
"data_string": "this_is_a_string",
"data_integer": "123",
"data_boolean": True,
"data_double": 123.456,
"data_json": {"json": "json"},
},
},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 75,
"key": "FR_T_75_W_100",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FR_T_100_W_100": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "this_is_a_string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 123.456},
{"id": 4, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True},
{
"id": 5,
"key": "JSON_VARIABLE",
"type": "json",
"value": {
"data_string": "this_is_a_string",
"data_integer": "123",
"data_boolean": True,
"data_double": 123.456,
"data_json": {"json": "json"},
},
},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 100,
"key": "FR_T_100_W_100",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FR_T_100_WW": {
"sdkKey": "someuniquestuff1234567",
"groups": {},
"campaignGroups": {},
"campaigns": [
{
"goals": [],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 100,
"segments": {"or": [{"custom_variable": {"safari": "true"}}]},
}
],
"variables": [{"id": 2, "key": "BOOLEAN_VARIABLE", "type": "boolean", "value": True}],
"id": 29,
"percentTraffic": 100,
"isForcedVariationEnabled": True,
"key": "FR_T_100_WW",
"name": "Campaign-24",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
"segments": {},
}
],
"accountId": 123456,
"version": 2,
},
"FR_WRONG_VARIABLE_TYPE": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
# STRING:
{"id": 1, "key": "STRING_TO_INTEGER", "type": "integer", "value": "123"},
{"id": 2, "key": "STRING_TO_FLOAT", "type": "double", "value": "123.456"},
# STRING_TO_BOOLEAN NOT POSSIBLE
# BOLLEAN:
{"id": 3, "key": "BOOLEAN_TO_STRING", "type": "string", "value": True},
# BOOLEAN TO INT, DOUBLE NOT POSSIBLE
# INTEGER:
{"id": 4, "key": "INTEGER_TO_STRING", "type": "string", "value": 24},
{"id": 5, "key": "INTEGER_TO_FLOAT", "type": "double", "value": 24},
# INTEGER TO BOOLEAN NOT POSSIBLE
# FLOAT:
{"id": 6, "key": "FLOAT_TO_STRING", "type": "string", "value": 24.24},
{"id": 7, "key": "FLOAT_TO_INTEGER", "type": "integer", "value": 24.0},
# FLOAT TO BOOLEAN NOT POSSIBLE
# JSON:
{"id": 8, "key": "JSON_STRING_TO_JSON", "type": "json", "value": '{"json": "json"}'},
# JSON TO BOOLEAN, INT, DOUBLE NOT POSSIBLE
# WRONG CASES
{"id": 9, "key": "WRONG_BOOLEAN", "type": "boolean", "value": "True"},
{"id": 10, "key": "WRONG_JSON_1", "type": "json", "value": True},
{"id": 11, "key": "WRONG_JSON_2", "type": "json", "value": "this_is_a_string"},
{"id": 12, "key": "WRONG_JSON_3", "type": "json", "value": 123},
{"id": 13, "key": "WRONG_JSON_4", "type": "json", "value": 123.234},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 100,
"key": "FR_WRONG_VARIABLE_TYPE",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_0_W_10_20_30_40": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 0,
"key": "FT_T_0_W_10_20_30_40",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_25_W_10_20_30_40": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 25,
"key": "FT_T_25_W_10_20_30_40",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_50_W_10_20_30_40": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 50,
"key": "FT_T_50_W_10_20_30_40",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_75_W_10_20_30_40": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 75,
"key": "FT_T_75_W_10_20_30_40",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_100_W_10_20_30_40": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 100,
"key": "FT_T_100_W_10_20_30_40",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"FT_T_100_W_10_20_30_40_IFEF": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": False,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": False,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 100,
"key": "FT_T_100_W_10_20_30_40_IFEF",
"status": "RUNNING",
"type": "FEATURE_TEST",
}
],
"accountId": 123456,
"version": 2,
},
"NEW_SETTINGS_FILE": {
"campaigns": [
{
"goals": [],
"variations": [{"id": "1", "name": "Control", "weight": 100}],
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "d1"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"id": 29,
"name": "Campaign-29",
"percentTraffic": 50,
"key": "FEATURE_ROLLOUT_KEY",
"status": "RUNNING",
"type": "FEATURE_ROLLOUT",
},
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 50,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "d2"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 321},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 50,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "d1"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 50,
"key": "FEATURE_TEST",
"status": "RUNNING",
"type": "FEATURE_TEST",
},
{
"goals": [{"identifier": "CUSTOM_RECOMMENDATION_AB_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": "1", "name": "Control", "weight": 40},
{"id": "2", "name": "Variation-1", "weight": 60},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 90,
"key": "NEW_RECOMMENDATION_AB_CAMPAIGN",
"status": "RUNNING",
"type": "VISUAL_AB",
},
],
"accountId": 123456,
"version": 2,
},
"T_75_W_10_TIMES_10": {
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 231, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 10},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 10},
{"id": 3, "name": "Variation-2", "changes": {}, "weight": 10},
{"id": 4, "name": "Variation-3", "changes": {}, "weight": 10},
{"id": 5, "name": "Variation-4", "changes": {}, "weight": 10},
{"id": 6, "name": "Variation-5", "changes": {}, "weight": 10},
{"id": 7, "name": "Variation-6", "changes": {}, "weight": 10},
{"id": 8, "name": "Variation-7", "changes": {}, "weight": 10},
{"id": 9, "name": "Variation-8", "changes": {}, "weight": 10},
{"id": 10, "name": "Variation-9", "changes": {}, "weight": 10},
],
"id": 260,
"name": "Campaign-260",
"percentTraffic": 75,
"key": "T_75_W_10_TIMES_10",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 123456,
"version": 2,
},
"T_100_W_50_50_WS": {
"sdkKey": "some_unique_key",
"campaigns": [
{
"percentTraffic": 100,
"goals": [{"identifier": "ddd", "id": 453, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 50},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 50},
],
"id": 174,
"name": "Campaign-174",
"segments": {
"and": [
{"or": [{"custom_variable": {"a": "wildcard(*123*)"}}]},
{"or": [{"custom_variable": {"hello": "regex(world)"}}]},
]
},
"key": "T_100_W_50_50_WS",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"T_50_W_50_50_WS": {
"sdkKey": "some_unique_key",
"campaigns": [
{
"percentTraffic": 50,
"goals": [{"identifier": "ddd", "id": 453, "type": "CUSTOM_GOAL"}],
"variations": [
{"id": 1, "name": "Control", "changes": {}, "weight": 50},
{"id": 2, "name": "Variation-1", "changes": {}, "weight": 50},
],
"id": 174,
"name": "Campaign-174",
"segments": {
"and": [
{"or": [{"custom_variable": {"a": "wildcard(*123*)"}}]},
{"or": [{"custom_variable": {"hello": "regex(world)"}}]},
]
},
"key": "T_50_W_50_50_WS",
"status": "RUNNING",
"type": "VISUAL_AB",
}
],
"accountId": 88888888,
"version": 1,
},
"FT_T_75_W_10_20_30_40_WS": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "FEATURE_TEST_GOAL", "id": 203, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": "1",
"name": "Control",
"weight": 10,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Control string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 123},
],
"isFeatureEnabled": False,
},
{
"id": "2",
"name": "Variation-1",
"weight": 20,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-1 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 456},
],
"isFeatureEnabled": True,
},
{
"id": "3",
"name": "Variation-2",
"weight": 30,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-2 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 789},
],
"isFeatureEnabled": True,
},
{
"id": "4",
"name": "Variation-3",
"weight": 40,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "Variation-3 string"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 100},
],
"isFeatureEnabled": True,
},
],
"id": 22,
"name": "Campaign-22",
"percentTraffic": 75,
"key": "FT_T_75_W_10_20_30_40_WS",
"status": "RUNNING",
"type": "FEATURE_TEST",
"segments": {
"and": [
{"or": [{"custom_variable": {"a": "wildcard(*123*)"}}]},
{"or": [{"custom_variable": {"hello": "regex(world)"}}]},
]
},
}
],
"accountId": 123456,
"version": 2,
},
"T_100_W_33_33_33_WS_WW": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 218, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": 1,
"name": "Control",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"safari": "true"}}]},
},
{
"id": 2,
"name": "Variation-1",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"browser": "wildcard(chrome*)"}}]},
},
{
"id": 3,
"name": "Variation-2",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"chrome": "false"}}]},
},
],
"id": 235,
"name": "Campaign-235",
"percentTraffic": 100,
"key": "T_100_W_33_33_33_WS_WW",
"status": "RUNNING",
"type": "VISUAL_AB",
"isForcedVariationEnabled": True,
"segments": {
"and": [
{"or": [{"custom_variable": {"contains_vwo": "wildcard(*vwo*)"}}]},
{
"and": [
{
"and": [
{
"or": [
{
"and": [
{
"or": [
{
"and": [
{
"or": [
{
"custom_variable": {
"regex_for_all_letters": "regex(^[A-z]+$)"
}
}
]
},
{
"or": [
{
"custom_variable": {
"regex_for_capital_letters": "regex(^[A-Z]+$)"
}
}
]
},
]
},
{
"or": [
{
"custom_variable": {
"regex_for_small_letters": "regex(^[a-z]+$)"
}
}
]
},
]
},
{
"or": [
{
"custom_variable": {
"regex_for_no_zeros": "regex(^[1-9]+$)"
}
}
]
},
]
},
{"or": [{"custom_variable": {"regex_for_zeros": "regex(^[0]+$)"}}]},
]
},
{"or": [{"custom_variable": {"regex_real_number": "regex(^\\d+(\\.\\d+)?)"}}]},
]
},
{
"or": [
{"or": [{"custom_variable": {"this_is_regex": "regex(this\\s+is\\s+text)"}}]},
{
"and": [
{
"and": [
{
"or": [
{
"custom_variable": {
"starts_with": "wildcard(starts_with_variable*)"
}
}
]
},
{
"or": [
{
"custom_variable": {
"contains": "wildcard(*contains_variable*)"
}
}
]
},
]
},
{
"or": [
{
"not": {
"or": [
{
"custom_variable": {
"is_not_equal_to": "is_not_equal_to_variable"
}
}
]
}
},
{
"or": [
{
"custom_variable": {
"is_equal_to": "equal_to_variable"
}
}
]
},
]
},
]
},
]
},
]
},
]
},
}
],
"accountId": 88888888,
"version": 1,
},
"FT_100_W_33_33_33_WS_WW": {
"sdkKey": "someuniquestuff1234567",
"campaigns": [
{
"goals": [{"identifier": "CUSTOM", "id": 218, "type": "CUSTOM_GOAL"}],
"variations": [
{
"id": 1,
"name": "Control",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"safari": "true"}}]},
"isFeatureEnabled": False,
"variables": [
{"id": 1, "key": "STRING_VARIABLE", "type": "string", "value": "CONTROL_STRING_VARIABLE"},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 0},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 0.0},
{
"id": 4,
"key": "JSON_VARIABLE",
"type": "json",
"value": {"data": "CONTROL_JSON_VARIABLE"},
},
],
},
{
"id": 2,
"name": "Variation-1",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"browser": "wildcard(chrome*)"}}]},
"isFeatureEnabled": True,
"variables": [
{
"id": 1,
"key": "STRING_VARIABLE",
"type": "string",
"value": "VARIATION-1_STRING_VARIABLE",
},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 1},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 1.1},
{
"id": 4,
"key": "JSON_VARIABLE",
"type": "json",
"value": {"data": "VARIATION-1_JSON_VARIABLE"},
},
],
},
{
"id": 3,
"name": "Variation-2",
"changes": {},
"weight": 33.3333,
"segments": {"or": [{"custom_variable": {"chrome": "false"}}]},
"isFeatureEnabled": False,
"variables": [
{
"id": 1,
"key": "STRING_VARIABLE",
"type": "string",
"value": "VARIATION-2_STRING_VARIABLE",
},
{"id": 2, "key": "INTEGER_VARIABLE", "type": "integer", "value": 2},
{"id": 3, "key": "FLOAT_VARIABLE", "type": "double", "value": 2.2},
{
"id": 4,
"key": "JSON_VARIABLE",
"type": "json",
"value": {"data": "VARIATION-2_JSON_VARIABLE"},
},
],
},
],
"id": 235,
"name": "Campaign-235",
"percentTraffic": 100,
"key": "FT_100_W_33_33_33_WS_WW",
"status": "RUNNING",
"type": "FEATURE_TEST",
"isForcedVariationEnabled": True,
"segments": {
"and": [
{"or": [{"custom_variable": {"contains_vwo": "wildcard(*vwo*)"}}]},
{
"and": [
{
"and": [
{
"or": [
{
"and": [
{
"or": [
{
"and": [
{
"or": [
{
"custom_variable": {
"regex_for_all_letters": "regex(^[A-z]+$)"
}
}
]
},
{
"or": [
{
"custom_variable": {
"regex_for_capital_letters": "regex(^[A-Z]+$)"
}
}
]
},
]
},
{
"or": [
{
"custom_variable": {
"regex_for_small_letters": "regex(^[a-z]+$)"
}
}
]
},
]
},
{
"or": [
{
"custom_variable": {
"regex_for_no_zeros": "regex(^[1-9]+$)"
}
}
]
},
]
},
{"or": [{"custom_variable": {"regex_for_zeros": "regex(^[0]+$)"}}]},
]
},
{"or": [{"custom_variable": {"regex_real_number": "regex(^\\d+(\\.\\d+)?)"}}]},
]
},
{
"or": [
{"or": [{"custom_variable": {"this_is_regex": "regex(this\\s+is\\s+text)"}}]},
{
"and": [
{
"and": [
{
"or": [
{
"custom_variable": {
"starts_with": "wildcard(starts_with_variable*)"
}
}
]
},
{
"or": [
{
"custom_variable": {
"contains": "wildcard(*contains_variable*)"
}
}
]
},
]
},
{
"or": [
{
"not": {
"or": [
{
"custom_variable": {
"is_not_equal_to": "is_not_equal_to_variable"
}
}
]
}
},
{
"or": [
{
"custom_variable": {
"is_equal_to": "equal_to_variable"
}
}
]
},
]
},
]
},
]
},
]
},
]
},
}
],
"accountId": 88888888,
"version": 1,
},
"GLOBAL_TRACK_SETTINGS_FILE": {
"accountId": 88888888,
"campaigns": [
{
"goals": [
{"id": 1, "identifier": "track1", "type": "CUSTOM_GOAL"},
{"id": 2, "identifier": "track2", "type": "CUSTOM_GOAL"},
{"id": 3, "identifier": "track3", "type": "REVENUE_TRACKING"},
{"id": 4, "identifier": "track4", "type": "REVENUE_TRACKING"},
],
"id": 1,
"name": "Campaign-1",
"isForcedVariationEnabled": False,
"key": "global_test_1",
"percentTraffic": 100,
"segments": {},
"status": "RUNNING",
"type": "VISUAL_AB",
"variations": [
{"changes": {}, "id": 1, "name": "Control", "weight": 33.3333},
{"changes": {}, "id": 2, "name": "Variation-1", "weight": 33.3333},
{"changes": {}, "id": 3, "name": "Variation-2", "weight": 33.3333},
],
},
{
"goals": [
{"id": 1, "identifier": "track1", "type": "CUSTOM_GOAL"},
{"id": 3, "identifier": "track3", "type": "CUSTOM_GOAL"},
{"id": 2, "identifier": "track2", "type": "REVENUE_TRACKING"},
{"id": 4, "identifier": "track4", "type": "REVENUE_TRACKING"},
],
"id": 2,
"name": "Campaign-2",
"isForcedVariationEnabled": False,
"key": "feature_test_1",
"percentTraffic": 100,
"segments": {},
"status": "RUNNING",
"type": "FEATURE_TEST",
"variations": [
{
"changes": {},
"id": 1,
"isFeatureEnabled": False,
"name": "Control",
"variables": [{"id": 1, "key": "string_1", "type": "string", "value": "default"}],
"weight": 50,
},
{
"changes": {},
"id": 2,
"isFeatureEnabled": True,
"name": "Variation-1",
"variables": [{"id": 1, "key": "string_1", "type": "string", "value": "default"}],
"weight": 50,
},
],
},
],
"sdkKey": "someuniquestuff1234567",
"version": 1,
},
}
| 46.379872
| 134
| 0.276382
| 3,712
| 65,442
| 4.686153
| 0.060075
| 0.070365
| 0.037942
| 0.028974
| 0.863467
| 0.83409
| 0.821213
| 0.801955
| 0.759816
| 0.730325
| 0
| 0.066275
| 0.586825
| 65,442
| 1,410
| 135
| 46.412766
| 0.577055
| 0.018062
| 0
| 0.632847
| 0
| 0
| 0.253673
| 0.026282
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
717d97c3c17a26e7c0ba1aa173c5fd1dd4f9516d
| 222
|
py
|
Python
|
python/packages/ibm_application_gateway/runtime/__init__.py
|
IBM-Security/ibm-application-gateway-resources
|
7263a48bbd158d1857a44a99449661b1a0c86793
|
[
"Apache-2.0"
] | null | null | null |
python/packages/ibm_application_gateway/runtime/__init__.py
|
IBM-Security/ibm-application-gateway-resources
|
7263a48bbd158d1857a44a99449661b1a0c86793
|
[
"Apache-2.0"
] | null | null | null |
python/packages/ibm_application_gateway/runtime/__init__.py
|
IBM-Security/ibm-application-gateway-resources
|
7263a48bbd158d1857a44a99449661b1a0c86793
|
[
"Apache-2.0"
] | 1
|
2020-10-20T08:30:27.000Z
|
2020-10-20T08:30:27.000Z
|
"""
Copyright contributors to the Application Gateway project
"""
from __future__ import absolute_import
from ibm_application_gateway.runtime.oidc_rp import *
from ibm_application_gateway.runtime.web_server import *
| 22.2
| 57
| 0.824324
| 28
| 222
| 6.142857
| 0.607143
| 0.313953
| 0.151163
| 0.27907
| 0.44186
| 0.44186
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121622
| 222
| 9
| 58
| 24.666667
| 0.882051
| 0.256757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
717fa11c7c5beff8dbf8a8693be7db6778e1306e
| 216
|
py
|
Python
|
repository/SamplesRepository.py
|
karim2196/btc_sentiment_analysis
|
549327cefea97c93872c54776dd85b96b697fb28
|
[
"MIT"
] | null | null | null |
repository/SamplesRepository.py
|
karim2196/btc_sentiment_analysis
|
549327cefea97c93872c54776dd85b96b697fb28
|
[
"MIT"
] | null | null | null |
repository/SamplesRepository.py
|
karim2196/btc_sentiment_analysis
|
549327cefea97c93872c54776dd85b96b697fb28
|
[
"MIT"
] | null | null | null |
from nltk.corpus import twitter_samples
def getNegativeTweets():
    """Return the list of negative example tweets bundled with NLTK's twitter_samples corpus."""
    corpus_file = 'negative_tweets.json'
    return twitter_samples.strings(corpus_file)
def getPositiveTweets():
    """Return the list of positive example tweets bundled with NLTK's twitter_samples corpus."""
    corpus_file = 'positive_tweets.json'
    return twitter_samples.strings(corpus_file)
| 24
| 58
| 0.782407
| 24
| 216
| 6.833333
| 0.625
| 0.256098
| 0.243902
| 0.329268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 216
| 9
| 59
| 24
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0.184332
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| true
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
718b26dfb685524201380d1ad32ca232159a7002
| 134
|
py
|
Python
|
src/handwriting_sample/visualizer/__init__.py
|
BDALab/handwriting-sample
|
a95913ade64b1854369db69ca77181cc33601225
|
[
"MIT"
] | 2
|
2021-11-23T10:40:37.000Z
|
2022-02-13T08:13:24.000Z
|
src/handwriting_sample/visualizer/__init__.py
|
BDALab/handwriting-sample
|
a95913ade64b1854369db69ca77181cc33601225
|
[
"MIT"
] | null | null | null |
src/handwriting_sample/visualizer/__init__.py
|
BDALab/handwriting-sample
|
a95913ade64b1854369db69ca77181cc33601225
|
[
"MIT"
] | null | null | null |
from handwriting_sample.visualizer.interface import HandwritingSampleVisualizer
from handwriting_sample.visualizer.exceptions import *
| 67
| 79
| 0.910448
| 13
| 134
| 9.230769
| 0.615385
| 0.25
| 0.35
| 0.516667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052239
| 134
| 2
| 80
| 67
| 0.944882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
71cb7ece14d873fb0b0738217e0cb30048adbac8
| 46
|
py
|
Python
|
reporter/__init__.py
|
cohnj/reporter
|
cbb80f77f70e3c85e8bc1cf124e7829b51c4a426
|
[
"BSD-3-Clause"
] | 1
|
2019-04-14T21:02:22.000Z
|
2019-04-14T21:02:22.000Z
|
crusher/service/reporter/__init__.py
|
thmp/crushedice
|
ae2eb2ea91ac417016f2a361fd8a9e1fb43e9626
|
[
"MIT"
] | null | null | null |
crusher/service/reporter/__init__.py
|
thmp/crushedice
|
ae2eb2ea91ac417016f2a361fd8a9e1fb43e9626
|
[
"MIT"
] | null | null | null |
import reporter
from reporter import Reporter
| 15.333333
| 29
| 0.869565
| 6
| 46
| 6.666667
| 0.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 46
| 2
| 30
| 23
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e08e46bd3f87551975b0a221f492e9a56dfa770a
| 295
|
py
|
Python
|
ulmo/ncdc/cirs/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 123
|
2015-01-29T12:35:52.000Z
|
2021-12-15T21:09:33.000Z
|
ulmo/ncdc/cirs/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 107
|
2015-01-05T17:56:22.000Z
|
2021-11-19T22:46:23.000Z
|
ulmo/ncdc/cirs/__init__.py
|
sblack-usu/ulmo
|
3213bf0302b44e77abdff1f3f66e7f1083571ce8
|
[
"BSD-3-Clause"
] | 49
|
2015-02-15T18:11:34.000Z
|
2022-01-25T14:25:32.000Z
|
"""
`National Climatic Data Center`_ `Climate Index Reference Sequential
(CIRS)`_ drought dataset
.. _National Climatic Data Center: http://www.ncdc.noaa.gov
.. _Climate Index Reference Sequential (CIRS): http://www1.ncdc.noaa.gov/pub/data/cirs/
"""
from .core import get_data
| 29.5
| 91
| 0.711864
| 38
| 295
| 5.394737
| 0.578947
| 0.156098
| 0.195122
| 0.253659
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004049
| 0.162712
| 295
| 9
| 92
| 32.777778
| 0.825911
| 0.820339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e0caa9c3e3a191fd0c4ef7bd1742e67204e5c65e
| 38,557
|
py
|
Python
|
sandbox/lib/jumpscale/JumpscaleLibs/clients/itsyouonline/generated/client/organizations_service.py
|
threefoldtech/threebot_prebuilt
|
1f0e1c65c14cef079cd80f73927d7c8318755c48
|
[
"Apache-2.0"
] | 2
|
2019-05-09T07:21:25.000Z
|
2019-08-05T06:37:53.000Z
|
sandbox/lib/jumpscale/JumpscaleLibs/clients/itsyouonline/generated/client/organizations_service.py
|
threefoldtech/threebot_prebuilt
|
1f0e1c65c14cef079cd80f73927d7c8318755c48
|
[
"Apache-2.0"
] | 664
|
2018-12-19T12:43:44.000Z
|
2019-08-23T04:24:42.000Z
|
sandbox/lib/jumpscale/JumpscaleLibs/clients/itsyouonline/generated/client/organizations_service.py
|
threefoldtech/threebot_prebuilt
|
1f0e1c65c14cef079cd80f73927d7c8318755c48
|
[
"Apache-2.0"
] | 7
|
2019-05-03T07:14:37.000Z
|
2019-08-05T12:36:52.000Z
|
from .APIKeyLabel import APIKeyLabel
from .Contract import Contract
from .DnsAddress import DnsAddress
from .Error import Error
from .GetOrganizationUsersResponseBody import GetOrganizationUsersResponseBody
from .IsMember import IsMember
from .JoinOrganizationInvitation import JoinOrganizationInvitation
from .LocalizedInfoText import LocalizedInfoText
from .Organization import Organization
from .OrganizationAPIKey import OrganizationAPIKey
from .OrganizationLogo import OrganizationLogo
from .OrganizationTreeItem import OrganizationTreeItem
from .RegistryEntry import RegistryEntry
from .ValidityTime import ValidityTime
from .api_response import APIResponse
from .unhandled_api_error import UnhandledAPIError
from .unmarshall_error import UnmarshallError
from Jumpscale import j
class OrganizationsService:
def __init__(self, client):
    """
    Store the HTTP client used to issue all organization API calls.

    :param client: configured itsyouonline client exposing base_url and
                   get/post/put/delete methods
    """
    # BUG FIX: removed a dead `pass` statement that preceded the assignment.
    self.client = client
def Get2faValidityTime(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the 2FA validity time for the organization, in seconds.

    It is method for GET /organizations/{globalid}/2fa/validity

    :returns: APIResponse wrapping a ValidityTime on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/2fa/validity"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=ValidityTime(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def Set2faValidityTime(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Update the 2FA validity time for the organization.

    It is method for PUT /organizations/{globalid}/2fa/validity
    """
    target = "{}/organizations/{}/2fa/validity".format(self.client.base_url, globalid)
    return self.client.put(target, data, headers, query_params, content_type)
def DeleteOrganizationAPIKey(self, label, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Removes an API key.

    It is method for DELETE /organizations/{globalid}/apikeys/{label}
    """
    target = "{}/organizations/{}/apikeys/{}".format(self.client.base_url, globalid, label)
    return self.client.delete(target, None, headers, query_params, content_type)
def GetOrganizationAPIKey(self, label, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get an api key from an organization.

    It is method for GET /organizations/{globalid}/apikeys/{label}

    :returns: APIResponse wrapping an OrganizationAPIKey on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/apikeys/" + label
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=OrganizationAPIKey(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def UpdateOrganizationAPIKey(self, data, label, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Updates the label or other properties of a key.

    It is method for PUT /organizations/{globalid}/apikeys/{label}
    """
    target = "{}/organizations/{}/apikeys/{}".format(self.client.base_url, globalid, label)
    return self.client.put(target, data, headers, query_params, content_type)
def GetOrganizationAPIKeyLabels(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the list of active api keys.

    It is method for GET /organizations/{globalid}/apikeys

    :returns: APIResponse wrapping a list of APIKeyLabel on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/apikeys"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            # Build the wrapped list in one pass instead of a manual append loop.
            resps = [APIKeyLabel(elem) for elem in resp.json()]
            return APIResponse(data=resps, response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def CreateNewOrganizationAPIKey(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Create a new API Key; a secret itself should not be provided, it will be generated serverside.

    It is method for POST /organizations/{globalid}/apikeys

    :returns: APIResponse wrapping an OrganizationAPIKey on HTTP 201
    :raises UnhandledAPIError: on any status code other than 201
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/apikeys"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=OrganizationAPIKey(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def GetOrganizationContracts(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the contracts where the organization is 1 of the parties. Order descending by date.

    It is method for GET /organizations/{globalid}/contracts

    Returns the raw client response; the body is not unmarshalled here.
    """
    target = "{}/organizations/{}/contracts".format(self.client.base_url, globalid)
    return self.client.get(target, None, headers, query_params, content_type)
def CreateOrganizationContracty(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Create a new contract.

    It is method for POST /organizations/{globalid}/contracts

    :returns: APIResponse wrapping a Contract on HTTP 201
    :raises UnhandledAPIError: on any status code other than 201
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/contracts"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=Contract(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def GetDescriptionWithFallback(self, langkey, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the description for an organization for this langkey, falling back to English
    if there is no description for this langkey.

    It is method for GET /organizations/{globalid}/description/{langkey}/withfallback

    :returns: APIResponse wrapping a LocalizedInfoText on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/description/" + langkey + "/withfallback"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def DeleteDescription(self, langkey, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Delete the description for this organization for a given language key.

    It is method for DELETE /organizations/{globalid}/description/{langkey}
    """
    target = "{}/organizations/{}/description/{}".format(self.client.base_url, globalid, langkey)
    return self.client.delete(target, None, headers, query_params, content_type)
def GetDescription(self, langkey, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the description for an organization for this langkey.

    It is method for GET /organizations/{globalid}/description/{langkey}

    :returns: APIResponse wrapping a LocalizedInfoText on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/description/" + langkey
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def SetDescription(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Set the description for this organization for a given language key.

    It is method for POST /organizations/{globalid}/description

    :returns: APIResponse wrapping a LocalizedInfoText on HTTP 201
    :raises UnhandledAPIError: on any status code other than 201
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/description"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def UpdateDescription(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Update the description for this organization for a given language key.

    It is method for PUT /organizations/{globalid}/description

    :returns: APIResponse wrapping a LocalizedInfoText on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/description"
    resp = self.client.put(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=LocalizedInfoText(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def DeleteOrganizationDns(self, dnsname, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Removes a DNS name associated with an organization.

    It is method for DELETE /organizations/{globalid}/dns/{dnsname}
    """
    target = "{}/organizations/{}/dns/{}".format(self.client.base_url, globalid, dnsname)
    return self.client.delete(target, None, headers, query_params, content_type)
def UpdateOrganizationDns(self, data, dnsname, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Updates an existing DNS name associated with an organization.

    It is method for PUT /organizations/{globalid}/dns/{dnsname}
    """
    target = "{}/organizations/{}/dns/{}".format(self.client.base_url, globalid, dnsname)
    return self.client.put(target, data, headers, query_params, content_type)
def CreateOrganizationDns(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Creates a new DNS name associated with an organization.

    It is method for POST /organizations/{globalid}/dns

    :returns: APIResponse wrapping a DnsAddress on HTTP 201
    :raises UnhandledAPIError: on any status code other than 201
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/dns"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=DnsAddress(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def RemovePendingOrganizationInvitation(self, username, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Cancel a pending invitation.

    It is method for DELETE /organizations/{globalid}/invitations/{username}
    """
    target = "{}/organizations/{}/invitations/{}".format(self.client.base_url, globalid, username)
    return self.client.delete(target, None, headers, query_params, content_type)
def GetInvitations(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the list of pending invitations for users to join this organization.

    It is method for GET /organizations/{globalid}/invitations

    :returns: APIResponse wrapping a list of JoinOrganizationInvitation on HTTP 200
    :raises UnhandledAPIError: on any status code other than 200
    :raises UnmarshallError: when the response body cannot be parsed
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/invitations"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            # Build the wrapped list in one pass instead of a manual append loop.
            resps = [JoinOrganizationInvitation(elem) for elem in resp.json()]
            return APIResponse(data=resps, response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Deliberate "unexpected status" path: propagate unchanged.
        raise
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute; the old
        # `e.message` raised AttributeError and masked the original error.
        raise UnmarshallError(resp, str(e))
def DeleteOrganizationLogo(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Removes the Logo from an organization
    It is method for DELETE /organizations/{globalid}/logo
    """
    endpoint = "{}/organizations/{}/logo".format(self.client.base_url, globalid)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def GetOrganizationLogo(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get the Logo from an organization
    It is method for GET /organizations/{globalid}/logo
    Returns an APIResponse wrapping an OrganizationLogo on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/logo"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=OrganizationLogo(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def SetOrganizationLogo(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Set the organization Logo for the organization
    It is method for PUT /organizations/{globalid}/logo
    Returns an APIResponse wrapping an OrganizationLogo on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/logo"
    resp = self.client.put(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=OrganizationLogo(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def RemoveOrganizationMember(
    self, username, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Remove a member from an organization.
    It is method for DELETE /organizations/{globalid}/members/{username}
    """
    endpoint = "{}/organizations/{}/members/{}".format(self.client.base_url, globalid, username)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def AddOrganizationMember(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Invite someone to become member of an organization.
    It is method for POST /organizations/{globalid}/members
    Returns an APIResponse wrapping a JoinOrganizationInvitation on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/members"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def UpdateOrganizationMemberShip(
    self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Update an organization membership
    It is method for PUT /organizations/{globalid}/members
    Returns an APIResponse wrapping an Organization on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/members"
    resp = self.client.put(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def RejectOrganizationInvite(
    self, invitingorg, role, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Reject the invite for one of your organizations
    It is method for DELETE /organizations/{globalid}/organizations/{invitingorg}/roles/{role}
    """
    endpoint = "{}/organizations/{}/organizations/{}/roles/{}".format(
        self.client.base_url, globalid, invitingorg, role
    )
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def AcceptOrganizationInvite(
    self, data, invitingorg, role, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Accept the invite for one of your organizations
    It is method for POST /organizations/{globalid}/organizations/{invitingorg}/roles/{role}
    Returns an APIResponse wrapping a JoinOrganizationInvitation on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/organizations/" + invitingorg + "/roles/" + role
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def RemoveIncludeSubOrgsOf(
    self, orgmember, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Remove an orgmember or orgowner organization to the includesuborgsof list
    It is method for DELETE /organizations/{globalid}/orgmembers/includesuborgs/{orgmember}
    """
    endpoint = "{}/organizations/{}/orgmembers/includesuborgs/{}".format(
        self.client.base_url, globalid, orgmember
    )
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def AddIncludeSubOrgsOf(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Add an orgmember or orgowner organization to the includesuborgsof list
    It is method for POST /organizations/{globalid}/orgmembers/includesuborgs
    Returns an APIResponse wrapping an Organization on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers/includesuborgs"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def DeleteOrgMember(self, globalid2, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Remove an organization as a member
    It is method for DELETE /organizations/{globalid}/orgmembers/{globalid2}
    """
    endpoint = "{}/organizations/{}/orgmembers/{}".format(self.client.base_url, globalid, globalid2)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def SetOrgMember(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Add another organization as a member of this one
    It is method for POST /organizations/{globalid}/orgmembers
    """
    endpoint = "{}/organizations/{}/orgmembers".format(self.client.base_url, globalid)
    return self.client.post(endpoint, data, headers, query_params, content_type)
def UpdateOrganizationOrgMemberShip(
    self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Update the membership status of an organization
    It is method for PUT /organizations/{globalid}/orgmembers
    Returns an APIResponse wrapping an Organization on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/orgmembers"
    resp = self.client.put(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def DeleteOrgOwner(self, globalid2, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Remove an organization as an owner
    It is method for DELETE /organizations/{globalid}/orgowners/{globalid2}
    """
    endpoint = "{}/organizations/{}/orgowners/{}".format(self.client.base_url, globalid, globalid2)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def SetOrgOwner(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Add another organization as an owner of this one
    It is method for POST /organizations/{globalid}/orgowners
    """
    endpoint = "{}/organizations/{}/orgowners".format(self.client.base_url, globalid)
    return self.client.post(endpoint, data, headers, query_params, content_type)
def RemoveOrganizationOwner(
    self, username, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Remove an owner from organization
    It is method for DELETE /organizations/{globalid}/owners/{username}
    """
    endpoint = "{}/organizations/{}/owners/{}".format(self.client.base_url, globalid, username)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def AddOrganizationOwner(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Invite someone to become owner of an organization.
    It is method for POST /organizations/{globalid}/owners
    Returns an APIResponse wrapping a JoinOrganizationInvitation on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/owners"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=JoinOrganizationInvitation(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def DeleteOrganizationRegistryEntry(
    self, key, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Removes a RegistryEntry from the organization's registry
    It is method for DELETE /organizations/{globalid}/registry/{key}
    """
    endpoint = "{}/organizations/{}/registry/{}".format(self.client.base_url, globalid, key)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def GetOrganizationRegistryEntry(
    self, key, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Get a RegistryEntry from the organization's registry.
    It is method for GET /organizations/{globalid}/registry/{key}
    Returns an APIResponse wrapping a RegistryEntry on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/registry/" + key
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=RegistryEntry(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def ListOrganizationRegistry(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Lists the RegistryEntries in an organization's registry.
    It is method for GET /organizations/{globalid}/registry
    Returns an APIResponse wrapping a list of RegistryEntry on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/registry"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            resps = []
            for elem in resp.json():
                resps.append(RegistryEntry(elem))
            return APIResponse(data=resps, response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def AddOrganizationRegistryEntry(
    self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Adds a RegistryEntry to the organization's registry, if the key is already used, it is overwritten.
    It is method for POST /organizations/{globalid}/registry
    Returns an APIResponse wrapping a RegistryEntry on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/registry"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=RegistryEntry(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def DeleteRequiredScope(
    self, requiredscope, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Deletes a required scope
    It is method for DELETE /organizations/{globalid}/requiredscopes/{requiredscope}
    """
    endpoint = "{}/organizations/{}/requiredscopes/{}".format(self.client.base_url, globalid, requiredscope)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def UpdateRequiredScope(
    self, data, requiredscope, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Updates a required scope
    It is method for PUT /organizations/{globalid}/requiredscopes/{requiredscope}
    """
    endpoint = "{}/organizations/{}/requiredscopes/{}".format(self.client.base_url, globalid, requiredscope)
    return self.client.put(endpoint, data, headers, query_params, content_type)
def AddRequiredScope(self, data, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Adds a required scope
    It is method for POST /organizations/{globalid}/requiredscopes
    """
    endpoint = "{}/organizations/{}/requiredscopes".format(self.client.base_url, globalid)
    return self.client.post(endpoint, data, headers, query_params, content_type)
def GetOrganizationTree(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Tree structure of all suborganizations
    It is method for GET /organizations/{globalid}/tree
    Returns an APIResponse wrapping a list of OrganizationTreeItem on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/tree"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            resps = []
            for elem in resp.json():
                resps.append(OrganizationTreeItem(elem))
            return APIResponse(data=resps, response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def UserIsMember(self, username, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Checks if the user has memberschip rights on the organization
    It is method for GET /organizations/{globalid}/users/ismember/{username}
    Returns an APIResponse wrapping an IsMember on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/users/ismember/" + username
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=IsMember(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def GetOrganizationUsers(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get all users from this organization, not including suborganizations.
    It is method for GET /organizations/{globalid}/users
    Returns an APIResponse wrapping a GetOrganizationUsersResponseBody on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid + "/users"
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=GetOrganizationUsersResponseBody(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def DeleteOrganization(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Deletes an organization and all data linked to it (join-organization-invitations, oauth_access_tokens, oauth_clients, logo)
    It is method for DELETE /organizations/{globalid}
    """
    endpoint = "{}/organizations/{}".format(self.client.base_url, globalid)
    return self.client.delete(endpoint, None, headers, query_params, content_type)
def GetOrganization(self, globalid, headers=None, query_params=None, content_type="application/json"):
    """
    Get organization info
    It is method for GET /organizations/{globalid}
    Returns an APIResponse wrapping an Organization on HTTP 200.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid
    resp = self.client.get(uri, None, headers, query_params, content_type)
    try:
        if resp.status_code == 200:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def CreateNewSubOrganization(
    self, data, globalid, headers=None, query_params=None, content_type="application/json"
):
    """
    Create a new suborganization.
    It is method for POST /organizations/{globalid}
    Returns an APIResponse wrapping an Organization on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations/" + globalid
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
def CreateNewOrganization(self, data, headers=None, query_params=None, content_type="application/json"):
    """
    Create a new organization. 1 user should be in the owners list. Validation is performed to check if the securityScheme allows management on this user.
    It is method for POST /organizations
    Returns an APIResponse wrapping an Organization on HTTP 201.
    Raises UnhandledAPIError on any other status code and
    UnmarshallError when the response body cannot be unmarshalled.
    """
    uri = self.client.base_url + "/organizations"
    resp = self.client.post(uri, data, headers, query_params, content_type)
    try:
        if resp.status_code == 201:
            return APIResponse(data=Organization(resp.json()), response=resp)
        message = "unknown status code={}".format(resp.status_code)
        raise UnhandledAPIError(response=resp, code=resp.status_code, message=message)
    except ValueError as msg:
        raise UnmarshallError(resp, msg)
    except UnhandledAPIError:
        # Not an unmarshalling failure; bare raise preserves the traceback.
        raise
    except Exception as e:
        # Fix: Exception.message was removed in Python 3; str(e) is portable.
        raise UnmarshallError(resp, str(e))
| 47.309202
| 158
| 0.651607
| 4,191
| 38,557
| 5.912431
| 0.061083
| 0.0452
| 0.04746
| 0.044392
| 0.878768
| 0.87554
| 0.872876
| 0.854474
| 0.810283
| 0.797127
| 0
| 0.003466
| 0.251653
| 38,557
| 814
| 159
| 47.367322
| 0.855306
| 0.152138
| 0
| 0.79588
| 0
| 0
| 0.08632
| 0.001702
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095506
| false
| 0.001873
| 0.033708
| 0
| 0.224719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ca345c19a2bd594cb90544a1df980a48e9ac454
| 15,884
|
py
|
Python
|
linearSolvers/traditionalElement.py
|
junbinhuang/mofem
|
6ed2ea5b7a8fbb1f8f0954636f6326c706da302c
|
[
"MIT"
] | 4
|
2020-05-17T03:07:30.000Z
|
2022-02-18T07:42:17.000Z
|
linearSolvers/traditionalElement.py
|
junbinhuang/mofem
|
6ed2ea5b7a8fbb1f8f0954636f6326c706da302c
|
[
"MIT"
] | null | null | null |
linearSolvers/traditionalElement.py
|
junbinhuang/mofem
|
6ed2ea5b7a8fbb1f8f0954636f6326c706da302c
|
[
"MIT"
] | 2
|
2020-05-23T06:49:41.000Z
|
2021-06-04T05:20:29.000Z
|
import numpy as np
import scipy
# from sksparse.cholmod import cholesky # It works (from Terminal).
from scipy.sparse.linalg import spsolve
import time
import sys, os
sys.path.append(os.path.dirname(sys.path[0]))
from elementLibrary import stiffnessMatrix, shapeFunction
from otherFunctions import numericalIntegration
from linearSolvers import AMORE
from meshTools import toDC
def lowerOrderFE(inputData):
    """The solver for lower order finite elements (4-node quads & 3-node triangles).

    The inputData indices used below are (inferred from usage here -- confirm
    against the input reader): [0] parameters (parameters[1] = problem type),
    [3] meshes, [4] per-element material indices, [5] nodal coordinates,
    [6] material list, [-3] constraints, [-2] force indicator flags,
    [-1] boundary force node/value list.
    Returns (displacement, energy): the full nodal displacement column vector
    (2 dofs per node) and the strain energy 0.5 * u^T K u.
    """
    startTime=time.time()
    parameters=inputData[0]
    # Material matrices: one constitutive matrix D per material entry.
    materialList=inputData[6]
    materialMatrix=[None]*len(materialList)
    for i in range(len(materialList)):
        materialMatrix[i]=twoDMaterialMatrix(materialList[i],parameters[1])
    # Assemble stiffness matrix in COO triplet form (Iglo, Jglo, Vglo).
    coordinates=inputData[5]
    meshes=inputData[3]
    materialMeshList=inputData[4]
    Iglo=[]
    Jglo=[]
    Vglo=[]
    for i in range(len(meshes[0])):
        coord=AMORE.getCoord(coordinates,meshes[0][i])
        # Element type is recognised by its node count: 3 = triangle, 4 = quad.
        if len(meshes[0][i])==3:
            Kloc=stiffnessMatrix.triFE(coord,materialMatrix[materialMeshList[0][i]])
        elif len(meshes[0][i])==4:
            Kloc=stiffnessMatrix.quadFE(coord,materialMatrix[materialMeshList[0][i]])
        else: raise ValueError("Wrong element numbering!")
        Iloc,Jloc,Vloc=stiffnessMatrix.sparsifyElementMatrix(Kloc,meshes[0][i])
        Iglo.extend(Iloc)
        Jglo.extend(Jloc)
        Vglo.extend(Vloc)
    Iglo=np.array(Iglo,dtype=int)
    Jglo=np.array(Jglo,dtype=int)
    Vglo=np.array(Vglo,dtype='d')
    # COO construction sums duplicate (i, j) entries, which performs the assembly.
    Kglo=scipy.sparse.coo_matrix((Vglo,(Iglo,Jglo)),shape=(2*len(coordinates),2*len(coordinates))).tocsr()
    print("Assembling stiffness matrix costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Force term.
    indForce=inputData[-2]
    if indForce[0]: # Body force is imposed.
        pass
    forceList=inputData[-1]
    nInt=2
    if indForce[1]: nInt+=3 # Customized boundary force.
    pos,wei=numericalIntegration.gaussQuad(nInt)
    fglo=np.zeros((2*len(coordinates),1))
    # Boundary forces come in consecutive pairs of (node, fx, fy) rows describing one edge.
    for i in range(len(forceList)//2):
        node1=forceList[2*i][0]
        node2=forceList[2*i+1][0]
        length=lenEdge(coordinates[node1],coordinates[node2])
        force1=np.array([forceList[2*i][1:3]]).transpose()
        force2=np.array([forceList[2*i+1][1:3]]).transpose()
        floc=np.zeros((4,1))
        # 1-D Gauss integration of the linearly interpolated traction along the edge.
        for j in range(nInt):
            Nmat=shapeFunction.oneDLinear(pos[j])
            force=Nmat[0,0]*force1+Nmat[0,2]*force2
            floc+=0.5*wei[j]*length*np.matmul(Nmat.transpose(),force)
        fglo[2*node1:2*node1+2,0]+=floc[0:2,0]
        fglo[2*node2:2*node2+2,0]+=floc[2:4,0]
    print("Calculating force term costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Impose constraints. Each constraint row looks like
    # [node, fix_x_flag, fix_y_flag, x_value, y_value] -- TODO confirm.
    fixList=np.zeros((2*len(coordinates),1))
    fixIndexList=np.zeros((2*len(coordinates),1),dtype=int)
    constraintList=inputData[-3]
    # Very important!!! Sort the constraints!!!
    # The row-deletion loops below assume ascending dof order so the running
    # offset `count` stays correct.
    constraintList.sort(key=lambda item:item[0])
    for i in constraintList:
        if i[1]:
            fixList[2*i[0]]=i[3]
            fixIndexList[2*i[0]]=1
        if i[2]:
            fixList[2*i[0]+1]=i[4]
            fixIndexList[2*i[0]+1]=1
    # Solve. Move prescribed-displacement contributions to the RHS first.
    fglo-=(Kglo.dot(fixList))
    Kglo_complete=Kglo.copy()
    Kglo=Kglo.tolil()
    # Delete the rows of all fixed dofs; `count` tracks how many rows are gone already.
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            fglo=np.delete(fglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            fglo=np.delete(fglo,2*i[0]+1-count)
            count+=1
    # Transposing turns the columns to delete into rows; repeat the same pass.
    Kglo=Kglo.transpose()
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            count+=1
    print("Imposing constraints costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    Kglo=Kglo.tocsc()
    print("Number of non-zero sparse matrix entries = %s."%Kglo.count_nonzero())
    # factor=cholesky(Kglo)
    # disp=factor(fglo)
    disp=spsolve(Kglo,fglo)
    print("Solving the linear system costs %s seconds."%(time.time()-startTime))
    # The complete displacement solution: merge prescribed values with solved dofs.
    displacement=np.zeros((2*len(coordinates),1))
    count=0
    for i in range(2*len(coordinates)):
        if fixIndexList[i]:
            displacement[i]=fixList[i]
            count+=1
        else:
            displacement[i]=disp[i-count]
    # Strain energy uses the unreduced stiffness matrix.
    energy=0.5*displacement.transpose()@Kglo_complete@displacement
    return displacement,energy
def ICMFE(inputData):
    """The solver for (4-node) ICM finite elements. Warning: The code is only for squares.
    For general quadrilaterals, the formulation needs to be modified to pass patch tests.

    Same pipeline as lowerOrderFE (assemble -> force term -> constraints ->
    solve -> recover), differing only in the element stiffness routine:
    4-node elements go through stiffnessMatrix.ICMFE and no triangles are accepted.
    Returns (displacement, energy) like lowerOrderFE.
    """
    startTime=time.time()
    parameters=inputData[0]
    # Material matrices: one constitutive matrix D per material entry.
    materialList=inputData[6]
    materialMatrix=[None]*len(materialList)
    for i in range(len(materialList)):
        materialMatrix[i]=twoDMaterialMatrix(materialList[i],parameters[1])
    # Assemble stiffness matrix in COO triplet form.
    coordinates=inputData[5]
    meshes=inputData[3]
    materialMeshList=inputData[4]
    Iglo=[]
    Jglo=[]
    Vglo=[]
    for i in range(len(meshes[0])):
        coord=AMORE.getCoord(coordinates,meshes[0][i])
        # Only 4-node elements are supported by the ICM formulation.
        if len(meshes[0][i])==4:
            Kloc,_,_=stiffnessMatrix.ICMFE(coord,materialMatrix[materialMeshList[0][i]])
        else: raise ValueError("Wrong element numbering!")
        Iloc,Jloc,Vloc=stiffnessMatrix.sparsifyElementMatrix(Kloc,meshes[0][i])
        Iglo.extend(Iloc)
        Jglo.extend(Jloc)
        Vglo.extend(Vloc)
    Iglo=np.array(Iglo,dtype=int)
    Jglo=np.array(Jglo,dtype=int)
    Vglo=np.array(Vglo,dtype='d')
    # COO construction sums duplicate (i, j) entries, performing the assembly.
    Kglo=scipy.sparse.coo_matrix((Vglo,(Iglo,Jglo)),shape=(2*len(coordinates),2*len(coordinates))).tocsr()
    print("Assembling stiffness matrix costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Force term.
    indForce=inputData[-2]
    if indForce[0]: # Body force is imposed.
        pass
    forceList=inputData[-1]
    nInt=2
    if indForce[1]: nInt+=3 # Customized boundary force.
    pos,wei=numericalIntegration.gaussQuad(nInt)
    fglo=np.zeros((2*len(coordinates),1))
    # Boundary forces come in consecutive pairs describing one loaded edge.
    for i in range(len(forceList)//2):
        node1=forceList[2*i][0]
        node2=forceList[2*i+1][0]
        length=lenEdge(coordinates[node1],coordinates[node2])
        force1=np.array([forceList[2*i][1:3]]).transpose()
        force2=np.array([forceList[2*i+1][1:3]]).transpose()
        floc=np.zeros((4,1))
        # 1-D Gauss integration of the linearly interpolated traction.
        for j in range(nInt):
            Nmat=shapeFunction.oneDLinear(pos[j])
            force=Nmat[0,0]*force1+Nmat[0,2]*force2
            floc+=0.5*wei[j]*length*np.matmul(Nmat.transpose(),force)
        fglo[2*node1:2*node1+2,0]+=floc[0:2,0]
        fglo[2*node2:2*node2+2,0]+=floc[2:4,0]
    print("Calculating force term costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Impose constraints (same row format as in lowerOrderFE).
    fixList=np.zeros((2*len(coordinates),1))
    fixIndexList=np.zeros((2*len(coordinates),1),dtype=int)
    constraintList=inputData[-3]
    # Very important!!! Sort the constraints!!!
    # The deletion loops below rely on ascending dof order.
    constraintList.sort(key=lambda item:item[0])
    for i in constraintList:
        if i[1]:
            fixList[2*i[0]]=i[3]
            fixIndexList[2*i[0]]=1
        if i[2]:
            fixList[2*i[0]+1]=i[4]
            fixIndexList[2*i[0]+1]=1
    # Solve. Prescribed displacements are moved to the RHS before reduction.
    fglo-=(Kglo.dot(fixList))
    Kglo_complete=Kglo.copy()
    Kglo=Kglo.tolil()
    # Remove the rows of fixed dofs; `count` is the running deletion offset.
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            fglo=np.delete(fglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            fglo=np.delete(fglo,2*i[0]+1-count)
            count+=1
    # Transpose so that the fixed columns can be removed with the same row routine.
    Kglo=Kglo.transpose()
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            count+=1
    print("Imposing constraints costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    Kglo=Kglo.tocsc()
    print("Number of non-zero sparse matrix entries = %s."%Kglo.count_nonzero())
    # factor=cholesky(Kglo)
    # disp=factor(fglo)
    disp=spsolve(Kglo,fglo)
    print("Solving the linear system costs %s seconds."%(time.time()-startTime))
    # The complete displacement solution: merge prescribed and solved dofs.
    displacement=np.zeros((2*len(coordinates),1))
    count=0
    for i in range(2*len(coordinates)):
        if fixIndexList[i]:
            displacement[i]=fixList[i]
            count+=1
        else:
            displacement[i]=disp[i-count]
    # Strain energy uses the unreduced stiffness matrix.
    energy=0.5*displacement.transpose()@Kglo_complete@displacement
    return displacement,energy
def quadraticFE(inputData):
    """The solver for second order finite elements (9-node quads & 6-node triangles).

    Same pipeline as lowerOrderFE, with two differences: quadratic element
    stiffness routines, and boundary-force integration over curved 3-node
    edges (corner nodes plus the mid-side node located via findMidNode).
    Returns (displacement, energy) like lowerOrderFE.
    """
    startTime=time.time()
    parameters=inputData[0]
    # Material matrices: one constitutive matrix D per material entry.
    materialList=inputData[6]
    materialMatrix=[None]*len(materialList)
    for i in range(len(materialList)):
        materialMatrix[i]=twoDMaterialMatrix(materialList[i],parameters[1])
    # Assemble stiffness matrix in COO triplet form.
    coordinates=inputData[5]
    meshes=inputData[3]
    materialMeshList=inputData[4]
    # Map node -> elements containing it, used below to find a loaded edge's element.
    nodeElements=toDC.nodeElementList(coordinates,meshes)
    Iglo=[]
    Jglo=[]
    Vglo=[]
    for i in range(len(meshes[0])):
        coord=AMORE.getCoord(coordinates,meshes[0][i])
        # Element type by node count: 6 = quadratic triangle, 9 = quadratic quad.
        if len(meshes[0][i])==6:
            Kloc=stiffnessMatrix.triQuadFE(coord,materialMatrix[materialMeshList[0][i]])
        elif len(meshes[0][i])==9:
            Kloc=stiffnessMatrix.quadQuadFE(coord,materialMatrix[materialMeshList[0][i]])
        else: raise ValueError("Wrong element numbering!")
        Iloc,Jloc,Vloc=stiffnessMatrix.sparsifyElementMatrix(Kloc,meshes[0][i])
        Iglo.extend(Iloc)
        Jglo.extend(Jloc)
        Vglo.extend(Vloc)
    Iglo=np.array(Iglo,dtype=int)
    Jglo=np.array(Jglo,dtype=int)
    Vglo=np.array(Vglo,dtype='d')
    # COO construction sums duplicate (i, j) entries, performing the assembly.
    Kglo=scipy.sparse.coo_matrix((Vglo,(Iglo,Jglo)),shape=(2*len(coordinates),2*len(coordinates))).tocsr()
    print("Assembling stiffness matrix costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Force term.
    indForce=inputData[-2]
    if indForce[0]: # Body force is imposed.
        pass
    forceList=inputData[-1]
    nInt=3
    if indForce[1]: nInt+=2 # Customized boundary force.
    pos,wei=numericalIntegration.gaussQuad(nInt)
    fglo=np.zeros((2*len(coordinates),1))
    for i in range(len(forceList)//2):
        node1=forceList[2*i][0]
        node2=forceList[2*i+1][0]
        # length=lenEdge(coordinates[node1],coordinates[node2])
        # Find the element.
        # The shared element of both corner nodes; presumably a (mesh, element)
        # index pair -- confirm against toDC.nodeElementList.
        elementPosition=(set(nodeElements[node1]) & set(nodeElements[node2])).pop()
        numbering=meshes[elementPosition[0]][elementPosition[1]]
        node3=findMidNode(numbering,node1,node2)
        force1=np.array([forceList[2*i][1:3]]).transpose()
        force2=np.array([forceList[2*i+1][1:3]]).transpose()
        floc=np.zeros((6,1))
        coord=np.array([coordinates[node1],coordinates[node2],[0.0,0.0]])
        # If the mid-side node has no stored coordinates, fall back to the edge midpoint.
        if coordinates[node3]: coord[2,:]=np.array(coordinates[node3])
        else: coord[2,:]=0.5*(coord[0,:]+coord[1,:])
        for j in range(nInt):
            # Only support linear force distribution.
            # Otherwise, use customized boundary force.
            Nmat=shapeFunction.oneDLinear(pos[j])
            force=Nmat[0,0]*force1+Nmat[0,2]*force2
            # Quadratic shape functions map the traction onto all three edge nodes.
            quadNmat,Jacobian=shapeFunction.oneDQuadratic(pos[j],coord)
            floc+=wei[j]*Jacobian*np.matmul(quadNmat.transpose(),force)
        fglo[2*node1:2*node1+2,0]+=floc[0:2,0]
        fglo[2*node2:2*node2+2,0]+=floc[2:4,0]
        fglo[2*node3:2*node3+2,0]+=floc[4:6,0]
    print("Calculating force term costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    # Impose constraints (same row format as in lowerOrderFE).
    fixList=np.zeros((2*len(coordinates),1))
    fixIndexList=np.zeros((2*len(coordinates),1),dtype=int)
    constraintList=inputData[-3]
    # Very important!!! Sort the constraints!!!
    # The deletion loops below rely on ascending dof order.
    constraintList.sort(key=lambda item:item[0])
    for i in constraintList:
        if i[1]:
            fixList[2*i[0]]=i[3]
            fixIndexList[2*i[0]]=1
        if i[2]:
            fixList[2*i[0]+1]=i[4]
            fixIndexList[2*i[0]+1]=1
    # Solve. Prescribed displacements are moved to the RHS before reduction.
    fglo-=(Kglo.dot(fixList))
    Kglo_complete=Kglo.copy()
    Kglo=Kglo.tolil()
    # Remove the rows of fixed dofs; `count` is the running deletion offset.
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            fglo=np.delete(fglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            fglo=np.delete(fglo,2*i[0]+1-count)
            count+=1
    # Transpose so that fixed columns can be removed with the same row routine.
    Kglo=Kglo.transpose()
    count=0
    for i in constraintList:
        if i[1]:
            delete_row_lil(Kglo,2*i[0]-count)
            count+=1
        if i[2]:
            delete_row_lil(Kglo,2*i[0]+1-count)
            count+=1
    print("Imposing constraints costs %s seconds."%(time.time()-startTime))
    startTime=time.time()
    Kglo=Kglo.tocsc()
    # factor=cholesky(Kglo)
    # disp=factor(fglo)
    disp=spsolve(Kglo,fglo)
    print("Solving the linear system costs %s seconds."%(time.time()-startTime))
    # The complete displacement solution: merge prescribed and solved dofs.
    displacement=np.zeros((2*len(coordinates),1))
    count=0
    for i in range(2*len(coordinates)):
        if fixIndexList[i]:
            displacement[i]=fixList[i]
            count+=1
        else:
            displacement[i]=disp[i-count]
    # Strain energy uses the unreduced stiffness matrix.
    energy=0.5*displacement.transpose()@Kglo_complete@displacement
    return displacement,energy
def findMidNode(numbering, node1, node2):
    """Return the mid-side node of the element edge joining node1 and node2.

    numbering is a quadratic element's connectivity: 6 entries (triangle,
    corners first then mid-sides) or 9 entries (quad, corners first).
    Returns None when numbering has another length or no edge joins the pair.
    """
    if len(numbering) == 6:
        corners = 3
    elif len(numbering) == 9:
        corners = 4
    else:
        return None
    for i in range(corners):
        nxt = (i + 1) % corners
        # Mid-side node k sits on the edge between corners k and k+1.
        if {numbering[i], numbering[nxt]} == {node1, node2}:
            return numbering[i + corners]
    return None
def twoDMaterialMatrix(material, problemType):
    """Build the 3x3 elastic constitutive matrix D for 2-D problems.

    Input:
    material: [E, nu];
    problemType: 1 -- Plane stress; 2 -- Plane strain.
    Raises ValueError for any other problem type.
    """
    E, nu = material[0], material[1]
    if problemType == 1:  # Plane stress
        diag = E / (1.0 - nu**2)
        off = E * nu / (1.0 - nu**2)
        shear = E / 2.0 / (1.0 + nu)
    elif problemType == 2:  # Plane strain
        diag = E * (1.0 - nu) / (1.0 + nu) / (1.0 - 2.0 * nu)
        off = diag * nu / (1.0 - nu)
        shear = diag * (1.0 - 2.0 * nu) / (2.0 * (1.0 - nu))
    else:
        raise ValueError("No such problem type!")
    Dmat = np.array([[diag, off, 0.0],
                     [off, diag, 0.0],
                     [0.0, 0.0, shear]])
    return Dmat
def lenEdge(coord1, coord2):
    """Euclidean length of the edge between two 2-D points (x, y)."""
    dx = coord1[0] - coord2[0]
    dy = coord1[1] - coord2[1]
    return (dx * dx + dy * dy) ** 0.5
def delete_row_lil(matrix, i):
    """Delete row i from a scipy.sparse LIL matrix, in place.

    Works by dropping the i-th entry of the per-row storage arrays
    and shrinking the (private) _shape attribute by one row; only the
    LIL format exposes per-row lists, hence the type check.
    """
    if not isinstance(matrix, scipy.sparse.lil_matrix):
        raise ValueError("The matrix should be in LIL format!")
    matrix.data = np.delete(matrix.data, i)
    matrix.rows = np.delete(matrix.rows, i)
    n_rows, n_cols = matrix._shape
    matrix._shape = (n_rows - 1, n_cols)
if __name__=="__main__":
    # Library-style module: no standalone demo/CLI entry point.
    pass
| 30.197719
| 108
| 0.612377
| 2,156
| 15,884
| 4.487941
| 0.106215
| 0.008681
| 0.010232
| 0.007441
| 0.813973
| 0.811492
| 0.802398
| 0.786792
| 0.786792
| 0.780591
| 0
| 0.043137
| 0.230861
| 15,884
| 526
| 109
| 30.197719
| 0.748875
| 0.088706
| 0
| 0.812325
| 0
| 0
| 0.050643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0.011204
| 0.02521
| 0.002801
| 0.067227
| 0.039216
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ce5a8bc46bb2ae5b2f6c652a74097dd3b7edb2a
| 418
|
py
|
Python
|
redlist/logo.py
|
habby9000/RedList
|
0da231a51ad0b59ef93c84d4b72ba82e8dce281a
|
[
"MIT"
] | null | null | null |
redlist/logo.py
|
habby9000/RedList
|
0da231a51ad0b59ef93c84d4b72ba82e8dce281a
|
[
"MIT"
] | null | null | null |
redlist/logo.py
|
habby9000/RedList
|
0da231a51ad0b59ef93c84d4b72ba82e8dce281a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
def print_logo():
    r"""Print the "RedList" ASCII-art banner and credit lines to stdout.

     ___    __    __   _    __
    / _ \___ ___/ / / / (_)__ / /_
    / , _/ -_) _ / / /__/ (_-</ __/
    /_/|_|\__/\_,_/ /____/_/___/\__/
    by Team Avengers
    MIT LICENSE
    """
    # Fix: backslashes in the banner are now escaped (\\). The original
    # literal contained invalid escape sequences such as "\_", which
    # modern CPython flags with a SyntaxWarning (and will eventually
    # reject); Python happened to keep them literal, so doubling the
    # backslashes leaves the printed text unchanged.
    print(" ___ __ __ _ __ \n / _ \\___ ___/ / / / (_)__ / /_\n / , _/ -_) _ / / /__/ (_-</ __/\n/_/|_|\\__/\\_,_/ /____/_/___/\\__/ \n")
    print("by Team Avengers\nMIT LICENSE\n\n")
| 29.857143
| 150
| 0.425837
| 24
| 418
| 3.625
| 0.541667
| 0.091954
| 0.321839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00346
| 0.308612
| 418
| 13
| 151
| 32.153846
| 0.297578
| 0.435407
| 0
| 0
| 0
| 0.333333
| 0.797235
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 1
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
e80ce8c5d72500feb89db57ceb2977d2b80bba3d
| 8,110
|
gyp
|
Python
|
chrome/browser/resources/settings/people_page/compiled_resources2.gyp
|
google-ar/chromium
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 777
|
2017-08-29T15:15:32.000Z
|
2022-03-21T05:29:41.000Z
|
chrome/browser/resources/settings/people_page/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 66
|
2017-08-30T18:31:18.000Z
|
2021-08-02T10:59:35.000Z
|
chrome/browser/resources/settings/people_page/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 123
|
2017-08-30T01:19:34.000Z
|
2022-03-17T22:55:31.000Z
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'camera',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'change_picture',
'dependencies': [
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/compiled_resources2.gyp:iron-selector-extracted',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'../compiled_resources2.gyp:route',
'camera',
'change_picture_browser_proxy',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'change_picture_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'easy_unlock_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'easy_unlock_turn_off_dialog',
'dependencies': [
'<(DEPTH)/ui/webui/resources/cr_elements/cr_dialog/compiled_resources2.gyp:cr_dialog',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'easy_unlock_browser_proxy',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'import_data_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'manage_profile',
'dependencies': [
'../compiled_resources2.gyp:route',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'manage_profile_browser_proxy',
'sync_browser_proxy',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'manage_profile_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'password_prompt_dialog',
'dependencies': [
'../compiled_resources2.gyp:route',
'<(EXTERNS_GYP):quick_unlock_private',
'lock_screen_constants',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'people_page',
'dependencies': [
'../compiled_resources2.gyp:route',
'../settings_page/compiled_resources2.gyp:settings_animated_pages',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'easy_unlock_browser_proxy',
'easy_unlock_turn_off_dialog',
'lock_screen_constants',
'lock_state_behavior',
'profile_info_browser_proxy',
'sync_browser_proxy',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'profile_info_browser_proxy',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'lock_state_behavior',
'dependencies': [
'../compiled_resources2.gyp:route',
'<(EXTERNS_GYP):quick_unlock_private',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'lock_screen_constants',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'lock_screen',
'dependencies': [
'../compiled_resources2.gyp:route',
'lock_screen_constants',
'lock_state_behavior',
'password_prompt_dialog',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'setup_pin_dialog',
'dependencies': [
'../compiled_resources2.gyp:route',
'lock_screen_constants',
'password_prompt_dialog',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'sync_page',
'dependencies': [
'../compiled_resources2.gyp:route',
'../settings_page/compiled_resources2.gyp:settings_animated_pages',
'sync_browser_proxy',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'sync_browser_proxy',
'dependencies': [
'<(DEPTH)/third_party/closure_compiler/externs/compiled_resources2.gyp:metrics_private',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'user_list',
'dependencies': [
'../compiled_resources2.gyp:route',
'<(EXTERNS_GYP):settings_private',
'<(EXTERNS_GYP):users_private',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'users_add_user_dialog',
'dependencies': [
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert',
'<(EXTERNS_GYP):users_private',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'users_page',
'dependencies': [
'user_list',
'users_add_user_dialog',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'import_data_dialog',
'dependencies': [
'../prefs/compiled_resources2.gyp:prefs_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:i18n_behavior',
'<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:web_ui_listener_behavior',
'import_data_browser_proxy',
],
'includes': ['../../../../../third_party/closure_compiler/compile_js2.gypi'],
},
],
}
| 39.178744
| 126
| 0.622441
| 833
| 8,110
| 5.722689
| 0.123649
| 0.17747
| 0.207048
| 0.145374
| 0.854416
| 0.840151
| 0.817076
| 0.807007
| 0.783092
| 0.764632
| 0
| 0.013068
| 0.188533
| 8,110
| 206
| 127
| 39.368932
| 0.71129
| 0.019112
| 0
| 0.605911
| 0
| 0.004926
| 0.721796
| 0.606087
| 0
| 0
| 0
| 0
| 0.014778
| 1
| 0
| true
| 0.014778
| 0.014778
| 0
| 0.014778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1c0d76e3987062c0970846c52d7f2fde42466cfb
| 24,276
|
py
|
Python
|
integration_test/load_generator.py
|
cl9200/nbase-arc
|
47c124b11b0bb2e8a8428c6d628ce82dc24c1ade
|
[
"Apache-2.0"
] | null | null | null |
integration_test/load_generator.py
|
cl9200/nbase-arc
|
47c124b11b0bb2e8a8428c6d628ce82dc24c1ade
|
[
"Apache-2.0"
] | null | null | null |
integration_test/load_generator.py
|
cl9200/nbase-arc
|
47c124b11b0bb2e8a8428c6d628ce82dc24c1ade
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2015 Naver Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import traceback
import threading
import telnet
import crc16
import util
import sys
import random
from arcci.arcci import *
class LoadGenerator(threading.Thread):
    """Telnet-based load-generator thread.

    __init__ opens a telnet connection and issues ``set <key> 0``; run()
    then loops sending pipelined MSET traffic plus mset/mget/del and a
    ``crc16`` command, mirroring the CRC locally with crc16.crc16_buff
    and clearing ``consistency`` on any mismatch or connection error.
    (Indentation reconstructed; code tokens unchanged.)
    """
    # NOTE(review): this class attribute is shadowed by the quit() method
    # below, so LoadGenerator.quit is the *method* at class-creation time.
    # run()'s "self.quit is not True" loops because a bound method is not
    # True; calling quit() rebinds self.quit to True and stops the loop.
    # It works, but only by accident -- consider renaming the flag.
    quit = False

    def __init__( self, id, telnet_ip, telnet_port, timeout=3 ):
        threading.Thread.__init__( self )
        self.timeout = timeout              # read_until timeout, seconds
        self.ip = telnet_ip
        self.port = telnet_port
        self.key = 'load_generator_key_%d' % (id)
        self.server = telnet.Telnet( '0' )
        self.server.connect( self.ip, self.port )
        # Seed the CRC key so the crc16 command starts from 0.
        cmd = 'set %s 0\r\n' % self.key
        self.server.write(cmd)
        self.server.read_until('\r\n', self.timeout)
        self.value = 0                      # locally mirrored CRC16 value
        self.consistency = True             # cleared on any error/mismatch

    def quit( self ):
        self.quit = True

    def getKey( self ):
        return self.key

    def isConsistent( self ):
        return self.consistency

    def run( self ):
        i = 0
        # Build one large MSET payload, then pipeline 8 copies of the
        # command by repeated self-concatenation (2 doublings before the
        # 'mset' wrap, 3 after).
        pipelined_multikey_cmd = 'pipelined_multikey 0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        pipelined_multikey_cmd += pipelined_multikey_cmd;
        pipelined_multikey_cmd += pipelined_multikey_cmd;
        pipelined_multikey_cmd = 'mset %s\r\n' % pipelined_multikey_cmd
        pipelined_multikey_cmd += pipelined_multikey_cmd;
        pipelined_multikey_cmd += pipelined_multikey_cmd;
        pipelined_multikey_cmd += pipelined_multikey_cmd;
        while self.quit is not True:
            if i > 50000:
                i = 0
            i = i + 1
            # Eight pipelined MSETs -> read eight replies.
            try:
                self.server.write( pipelined_multikey_cmd )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
                response = self.server.read_until( '\r\n', self.timeout )
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % pipelined_multikey_cmd )
                self.consistency = False
                return
            cmd = 'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6\r\n' % (self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write( cmd )
                response = self.server.read_until( '\r\n', self.timeout )
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return
            cmd = 'mget 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % (self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write( cmd )
                # 13 lines: multi-bulk header + 6 x (length line + value line).
                for read_loop in range(13):
                    response = self.server.read_until( '\r\n', self.timeout )
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return
            cmd = 'del 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % (self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write( cmd )
                response = self.server.read_until( '\r\n', self.timeout )
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return
            # Disabled 'info all' probe kept from the original source:
            # cmd = 'info all\r\ninfo all\r\ninfo all\r\n'
            # try:
            #     self.server.write( cmd )
            #     for read_loop in range(3):
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            #         response = self.server.read_until( '\r\n\r\n' )
            # except:
            #     util.log( 'Connection closed in LoadGenerator:%s' % cmd )
            #     self.consistency = False
            #     return
            cmd = 'crc16 %s %d\r\n' % (self.key, i)
            try:
                self.server.write( cmd )
                response = self.server.read_until( '\r\n', self.timeout )
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return
            # Mirror the server-side CRC locally and compare; response is
            # ':<int>\r\n', hence the [1:-2] slice.
            self.value = crc16.crc16_buff(str(i), self.value)
            try:
                if (int(response[1:-2]) != self.value):
                    if self.consistency:
                        self.consistency = False
            except ValueError:
                #util.log( 'Value Error in LoadGenerator, ret:%s' % response[:-2] )
                self.consistency = False
                return
def is_reply_ok(reply):
    """Return True when an ARCCI reply tuple reports success.

    ``reply`` is ``(type, payload)`` as produced by arcci get_reply().
    False for an error reply; True for status/integer/string/nil; for an
    array reply every element must be ok.  An empty array or an
    unrecognized type still falls through returning None (falsy but not
    ``== False``), preserving the behaviour callers rely on when they
    compare with ``== False``.
    """
    if reply[0] == ARC_REPLY_ERROR:
        return False
    elif reply[0] == ARC_REPLY_STATUS:
        return True
    elif reply[0] == ARC_REPLY_INTEGER:
        return True
    elif reply[0] == ARC_REPLY_STRING:
        return True
    elif reply[0] == ARC_REPLY_ARRAY:
        # Bug fix: the original returned after inspecting only the FIRST
        # array element; now every element is checked.  The "== False"
        # comparison keeps treating a None sub-result as acceptable,
        # matching the callers' convention.
        for r in reply[1]:
            if is_reply_ok(r) == False:
                return False
        if reply[1]:
            return True
    elif reply[0] == ARC_REPLY_NIL:
        return True
class LoadGenerator_ARCCI_FaultTolerance(threading.Thread):
    """ARCCI-based load generator for fault-tolerance tests.

    run() keeps issuing single ``set`` commands via the arcci API until
    quit() is called, counting failed operations in ``err_cnt`` instead
    of stopping, and destroys the API handle on exit.

    Fixes vs. the original:
    - Python-2-only ``print reply`` statements replaced with the
      ``print(reply)`` form, valid on both Python 2 and 3.
    - The ``elif reply[0] == ARC_REPLY_ERROR`` branch was unreachable
      (any ERROR reply already satisfied ``!= ARC_REPLY_STATUS``); the
      dead branch is removed, behaviour unchanged.
    """

    def __init__(self, id, api, verbose=False):
        threading.Thread.__init__(self)
        self.id = id
        self.api = api
        self.timeout = 3000          # do_request timeout, milliseconds
        self.verbose = verbose
        # Key prefix kept byte-identical (including the historical
        # "FaulTolerance" spelling) so stored keys stay compatible.
        self.key = 'LoadGenerator_ARCCI_FaulTolerance_%d' % self.id
        # NOTE(review): self.i is never incremented, so process() always
        # writes key "...0" with value "0" -- confirm this is intended.
        self.i = 0
        self.kv = {}                 # key -> last value successfully set
        self.err_cnt = 0
        self.cont = True

    def quit(self):
        """Ask run() to stop after the current iteration."""
        self.cont = False

    def get_err_cnt(self):
        """Number of failed operations observed so far."""
        return self.err_cnt

    def process(self):
        """Issue one ``set`` and record it in self.kv; False on failure."""
        rqst = self.api.create_request()
        if rqst == None:
            return False
        key = "%s%d" % (self.key, self.i % 100)
        value = "%d" % self.i
        cmd = 'set %s %s' % (key, value)
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.err_cnt += 1
                return False
            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0 or reply == None:
                self.err_cnt += 1
                return False
            # Any non-STATUS reply (including ARC_REPLY_ERROR) counts as
            # a failure.
            if reply[0] != ARC_REPLY_STATUS:
                print(reply)
                self.err_cnt += 1
                return False
            self.kv[key] = value
        except:
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s, except' % cmd)
                util.log(sys.exc_info())
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            self.consistency = False
            return False
        return True

    def run(self):
        try:
            while self.cont:
                self.process()
            util.log('End LoadGenerator_ARCCI_FaultTolerance')
        except:
            util.log('Exception LoadGenerator_ARCCI_FaultTolerance')
        finally:
            self.api.destroy()
class LoadGenerator_ARCCI(threading.Thread):
    """ARCCI-based port of the telnet LoadGenerator.

    init_crc() seeds ``set <key> 0``; process() then drives one round of
    pipelined-MSET, mset/mget/del and a ``crc16`` check per iteration,
    mirroring the CRC locally with crc16.crc16_buff and clearing
    ``consistency`` on any failure.  (Indentation reconstructed; code
    tokens unchanged.)
    """

    def __init__(self, id, api, timeout_second=3, verbose=False):
        threading.Thread.__init__(self)
        self.api = api
        self.timeout = timeout_second * 1000   # arcci timeouts are in ms
        self.verbose = verbose
        self.cont = True
        self.consistency = True
        self.value = 0                         # locally mirrored CRC16
        self.i = 0
        self.key = 'load_generator_key_%d' % (id)
        # One large MSET pipelined 8x via repeated self-concatenation
        # (2 doublings before the 'mset' wrap, 3 after).
        self.pipelined_multikey_cmd = 'pipelined_multikey 0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        self.pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        self.pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        self.pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        self.pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        self.pipelined_multikey_cmd += self.pipelined_multikey_cmd;
        self.pipelined_multikey_cmd += self.pipelined_multikey_cmd;
        self.pipelined_multikey_cmd = 'mset %s\r\n' % self.pipelined_multikey_cmd
        self.pipelined_multikey_cmd += self.pipelined_multikey_cmd;
        self.pipelined_multikey_cmd += self.pipelined_multikey_cmd;
        self.pipelined_multikey_cmd += self.pipelined_multikey_cmd;

    def quit( self ):
        # Ask run() to stop after the current iteration.
        self.cont = False

    def is_reply_ok(self, reply):
        # Same checks as the module-level is_reply_ok().
        # NOTE(review): the array branch delegates to the *global*
        # is_reply_ok (not self.), and returns after the first element;
        # unknown reply types fall through returning None.
        if reply[0] == ARC_REPLY_ERROR:
            return False
        elif reply[0] == ARC_REPLY_STATUS:
            return True
        elif reply[0] == ARC_REPLY_INTEGER:
            return True
        elif reply[0] == ARC_REPLY_STRING:
            return True
        elif reply[0] == ARC_REPLY_ARRAY:
            for r in reply[1]:
                return is_reply_ok(r)
        elif reply[0] == ARC_REPLY_NIL:
            return True

    def run(self):
        loop_cnt = 0;
        try:
            if self.init_crc() == False:
                return False
            while self.cont:
                loop_cnt += 1
                if self.process() == False:
                    return False
            self.api.destroy()
        except:
            # NOTE(review): 'cmd' is not defined in this scope, so this
            # log line itself raises NameError inside the handler
            # (masking the original exception) -- confirm and fix.
            util.log('Connection closed in LoadGenerator:%s, except' % cmd)
            util.log(sys.exc_info())
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            return False

    def init_crc(self):
        # Init CRC: seed the key with 0 so the crc16 command starts clean.
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        cmd = 'set %s 0' % self.key
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0 or reply == None:
                self.consistency = False
                return False
            if reply[0] != ARC_REPLY_STATUS:
                self.consistency = False
                return False
        except:
            util.log('Connection closed in LoadGenerator:%s, except' % cmd)
            util.log(sys.exc_info())
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            self.consistency = False
            return False
        return True

    def process(self):
        # One full traffic round; returns False (and clears consistency)
        # on any failure.
        if self.i > 50000:
            self.i = 0
        self.i += 1
        # Pipelined multikey - Request
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        self.api.append_command(rqst, self.pipelined_multikey_cmd)
        # Pipelined multikey - Check reply
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            while True:
                be_errno, reply = self.api.get_reply(rqst)
                if be_errno < 0 or reply == None:
                    if be_errno < 0:
                        self.consistency = False
                        return False
                    break
                if is_reply_ok(reply) == False:
                    self.consistency = False
                    return False
        except:
            if self.verbose:
                util.log( 'Connection closed in LoadGenerator:%s' % self.pipelined_multikey_cmd )
            self.consistency = False
            return False
        # Multi - MSET
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        cmd = 'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6' % (self.key, self.key, self.key, self.key, self.key, self.key)
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            while True:
                be_errno, reply = self.api.get_reply(rqst)
                if be_errno < 0 or reply == None:
                    if be_errno < 0:
                        self.consistency = False
                        return False
                    break
                if is_reply_ok(reply) == False:
                    self.consistency = False
                    return False
        except:
            if self.verbose:
                util.log( 'Connection closed in LoadGenerator:%s' % cmd )
            self.consistency = False
            return False
        # Multi - MGET
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        cmd = 'mget 1%s 2%s 3%s 4%s 5%s 6%s' % (self.key, self.key, self.key, self.key, self.key, self.key)
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            while True:
                be_errno, reply = self.api.get_reply(rqst)
                if be_errno < 0 or reply == None:
                    if be_errno < 0:
                        self.consistency = False
                        return False
                    break
                if is_reply_ok(reply) == False:
                    self.consistency = False
                    return False
        except:
            if self.verbose:
                util.log( 'Connection closed in LoadGenerator:%s' % cmd )
            self.consistency = False
            return False
        # Multi - DEL
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        cmd = 'del 1%s 2%s 3%s 4%s 5%s 6%s' % (self.key, self.key, self.key, self.key, self.key, self.key)
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            while True:
                be_errno, reply = self.api.get_reply(rqst)
                if be_errno < 0 or reply == None:
                    if be_errno < 0:
                        self.consistency = False
                        return False
                    break
                if is_reply_ok(reply) == False:
                    self.consistency = False
                    return False
        except:
            if self.verbose:
                util.log( 'Connection closed in LoadGenerator:%s' % cmd )
            self.consistency = False
            return False
        # CRC
        rqst = self.api.create_request()
        if rqst == None:
            self.consistency = False
            return False
        cmd = 'crc16 %s %d' % (self.key, self.i)
        self.api.append_command(rqst, cmd)
        try:
            ret = self.api.do_request(rqst, self.timeout)
            if ret != 0:
                self.consistency = False
                return False
            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0 or reply == None:
                if be_errno < 0:
                    self.consistency = False
                    return False
            if reply[0] != ARC_REPLY_INTEGER:
                self.consistency = False
                return False
            # CRC - Check consistency
            self.value = crc16.crc16_buff(str(self.i), self.value)
            try:
                if (reply[1] != self.value):
                    if self.verbose:
                        util.log('Value Error in LoadGenerator, cmd:"%s", reply:%s, value:%d' % (cmd, reply[1], self.value))
                    self.consistency = False
                    return False
            except ValueError:
                if self.verbose:
                    # NOTE(review): 'response' is undefined here (copied
                    # from the telnet generator) -- this line would raise
                    # NameError if ever reached.
                    util.log( 'Value Error in LoadGenerator, ret:%s' % response[:-2] )
                self.consistency = False
                return False
        except:
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s, except' % cmd)
                util.log(sys.exc_info())
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            self.consistency = False
            return False
        return True
class LoadGenerator_ARCCI_Affinity(threading.Thread):
    """ARCCI load generator for connection-affinity tests.

    Each process() call rotates the operation mode (write -> read ->
    both), builds one request in the configured key pattern with random
    keys, sends it, and counts failures in ``err_cnt``.  (Indentation
    reconstructed; code tokens unchanged.)
    """
    ## Constructor of LoadGenerator_ARCCI_Affinity
    # @param arcci arc_t handle
    # @param pattern request pattern. <singlekey | range-singlekey | range-multikey | pipeline-singlekey | pipeline-multikey>
    # @param verbose if it is True, logging verbosely
    def __init__(self, arcci, pattern='singlekey', verbose=False):
        threading.Thread.__init__(self)
        self.arcci = arcci
        self.timeout = 3000          # do_request timeout, milliseconds
        self.pattern = pattern
        self.verbose = verbose
        self.key = 'LoadGenerator_ARCCI_Affinity'
        self.i = 0
        self.err_cnt = 0
        self.cont = True
        self.op_rotate = 'write' # 'write' | 'read' | 'both'

    def quit(self):
        # Ask run() to stop after the current iteration.
        self.cont = False

    def get_err_cnt(self):
        return self.err_cnt

    def get_arcci(self):
        return self.arcci

    def process(self):
        # Rotate the operation mode: write -> read -> both -> write ...
        if self.op_rotate == 'write':
            self.op_rotate = 'read'
        elif self.op_rotate == 'read':
            self.op_rotate = 'both'
        elif self.op_rotate == 'both':
            self.op_rotate = 'write'
        rqst = self.arcci.create_request()
        if rqst == None:
            return False
        # NOTE(review): if self.pattern matches none of the branches
        # below, 'cmd' is never bound and the except handler's log line
        # would raise NameError -- confirm patterns are validated upstream.
        try:
            # Make request and command(s)
            if self.pattern == 'singlekey':
                rand = random.random() * 10000
                if self.op_rotate == 'read':
                    cmd = 'get %s_%d' % (self.key, rand)
                else:
                    cmd = 'set %s_%d %s' % (self.key, rand, rand)
                self.arcci.append_command(rqst, cmd)
            elif self.pattern == 'range-multikey':
                # Ten random keys in one mget/mset.
                kv = ''
                for i in xrange(10):
                    rand = random.random() * 10000
                    if self.op_rotate == 'read':
                        kv += '%s_%d ' % (self.key, rand)
                    else:
                        kv += '%s_%d %s ' % (self.key, rand, rand)
                if self.op_rotate == 'read':
                    self.arcci.append_command(rqst, 'mget %s' % kv.strip())
                else:
                    self.arcci.append_command(rqst, 'mset %s' % kv.strip())
            elif self.pattern == 'range-singlekey':
                # One random key repeated ten times in one mget/mset.
                kv = ''
                rand = random.random() * 10000
                for i in xrange(10):
                    if self.op_rotate == 'read':
                        kv += '%s_%d ' % (self.key, rand)
                    else:
                        kv += '%s_%d %s ' % (self.key, rand, rand)
                if self.op_rotate == 'read':
                    self.arcci.append_command(rqst, 'mget %s' % kv.strip())
                else:
                    self.arcci.append_command(rqst, 'mset %s' % kv.strip())
            elif self.pattern == 'pipeline-singlekey':
                # Ten pipelined commands against one random key.
                rand = random.random() * 10000
                for i in xrange(10):
                    if self.op_rotate == 'read':
                        cmd = 'get %s_%d' % (self.key, rand)
                    elif self.op_rotate == 'write':
                        cmd = 'set %s_%d %s' % (self.key, rand, rand)
                    elif self.op_rotate == 'both':
                        if i % 2:
                            cmd = 'get %s_%d' % (self.key, rand)
                        else:
                            cmd = 'set %s_%d %s' % (self.key, rand, rand)
                    self.arcci.append_command(rqst, cmd)
            elif self.pattern == 'pipeline-multikey':
                # Ten pipelined commands, fresh random key per command.
                for i in xrange(10):
                    rand = random.random() * 10000
                    if self.op_rotate == 'read':
                        cmd = 'get %s_%d' % (self.key, rand)
                    elif self.op_rotate == 'write':
                        cmd = 'set %s_%d %s' % (self.key, rand, rand)
                    elif self.op_rotate == 'both':
                        if i % 2:
                            cmd = 'get %s_%d' % (self.key, rand)
                        else:
                            cmd = 'set %s_%d %s' % (self.key, rand, rand)
                    self.arcci.append_command(rqst, cmd)
            # Send request
            ret = self.arcci.do_request(rqst, self.timeout)
            if ret != 0:
                self.err_cnt += 1
                return False
            # Receive reply
            be_errno, reply = self.arcci.get_reply(rqst)
            if be_errno < 0 or reply == None:
                self.err_cnt += 1
                return False
            # Handle result
            # NOTE(review): the ARC_REPLY_ERROR elif below is unreachable
            # (an ERROR reply already satisfies != ARC_REPLY_STATUS).
            if reply[0] != ARC_REPLY_STATUS:
                self.err_cnt += 1
                return False
            elif reply[0] == ARC_REPLY_ERROR:
                self.err_cnt += 1
                return False
        except:
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s, except' % cmd)
                util.log(sys.exc_info())
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            # NOTE(review): self.consistency is never initialized in
            # __init__; it springs into existence here.
            self.consistency = False
            return False
        return True

    def run(self):
        try:
            while self.cont:
                self.process()
            util.log('End LoadGenerator_ARCCI_Affinity')
        except:
            util.log('Exception LoadGenerator_ARCCI_Affinity')
            util.log(sys.exc_info()[0])
        finally:
            self.arcci.destroy()
| 35.080925
| 159
| 0.535096
| 2,699
| 24,276
| 4.672842
| 0.076695
| 0.033302
| 0.066603
| 0.084523
| 0.823501
| 0.793768
| 0.763321
| 0.721694
| 0.706153
| 0.701554
| 0
| 0.079266
| 0.369624
| 24,276
| 691
| 160
| 35.131693
| 0.744887
| 0.086382
| 0
| 0.812379
| 0
| 0.007737
| 0.109795
| 0.055598
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.015474
| null | null | 0.01354
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c32c3186b313f3e843531f74356fcc5f1147f9b
| 45,071
|
py
|
Python
|
pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/bridge_domain.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/bridge_domain.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/l2vpn/iosxr/bridge_domain.py
|
miott/genielibs
|
6464642cdd67aa2367bdbb12561af4bb060e5e62
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
from abc import ABC
import warnings
import contextlib
from genie.conf.base.attributes import UnsupportedAttributeWarning,\
AttributesHelper
from genie.conf.base.cli import CliConfigBuilder
from genie.conf.base.config import CliConfig
from genie.libs.conf.interface import BviInterface
from genie.libs.conf.l2vpn.pseudowire import PseudowireNeighbor,\
PseudowireIPv4Neighbor, PseudowireEviNeighbor
class BridgeDomain(ABC):
    """IOS-XR CLI configuration builder for an L2VPN bridge domain."""

    class DeviceAttributes(ABC):
        """Per-device configuration attributes of the bridge domain."""

        class InterfaceAttributes(ABC):
            """Attributes of one attachment circuit (interface) of the bridge domain.

            Renders either `routed interface BVIx` (for BVI interfaces) or
            `interface <name>` sub-mode configuration.
            """

            def build_config(self, apply=True, attributes=None, unconfig=False,
                             **kwargs):
                """Build and return the attachment-circuit CLI as a string.

                This inner builder never applies to the device itself, so
                `apply` must be False; the device-level build_config
                aggregates the returned text.
                """
                assert not apply
                assert not kwargs, kwargs
                attributes = AttributesHelper(self, attributes)
                configurations = CliConfigBuilder(unconfig=unconfig)
                # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 (config-l2vpn-bg-bd-ac)
                with configurations.submode_context(
                        attributes.format(
                            'routed interface {interface_name}' if isinstance(self.interface, BviInterface) else 'interface {interface_name}',
                            force=True),
                        exit_cmd='' if isinstance(self.interface, BviInterface) else 'exit',  # routed interface may not be an actual submode
                        ):
                    if unconfig and attributes.iswildcard:
                        configurations.submode_unconfig()
                    if isinstance(self.interface, BviInterface):
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1 / split-horizon group core
                        v = attributes.value('split_horizon_group_core')
                        if v is True:
                            configurations.append_line('split-horizon group core')
                        if configurations:
                            # There are configurations... It must be a submode; exit.
                            configurations.append_line('exit', raw=True)
                        else:
                            # There are no configurations... May not be a submode; don't exit.
                            pass
                    else:
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dhcp ipv4 none
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dhcp ipv4 snoop profile someword3
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection (config-l2vpn-bg-bd-ac-dai)
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation (config-l2vpn-bg-bd-ac-dai-av)
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / dst-mac
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / dst-mac disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / ipv4
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / ipv4 disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / src-mac
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / address-validation / src-mac disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / logging
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / dynamic-arp-inspection / logging disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding unknown-unicast
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / flooding unknown-unicast disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / igmp snooping profile someword3
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard (config-l2vpn-bg-bd-ac-ipsg)
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / logging
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / ip-source-guard / logging disable
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac (config-l2vpn-bg-bd-ac-mac)
                        # Delegate the interface-level `mac` sub-mode to the nested MacAttributes namespace.
                        sub, attributes2 = attributes.namespace('mac')
                        if sub is not None:
                            configurations.append_block(
                                sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac (config-l2vpn-bg-bd-ac-mac)
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mld snooping profile someword3
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / split-horizon group
                        v = attributes.value('split_horizon_group')
                        if v is True:
                            configurations.append_line('split-horizon group')
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / static-mac-address aaaa.bbbb.cccc
                        configurations.append_line(attributes.format('static-mac-address {static_mac_address}'))
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control broadcast kbps 64
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control broadcast pps 1
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control multicast kbps 64
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control multicast pps 1
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control unknown-unicast kbps 64
                        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / storm-control unknown-unicast pps 1
                return str(configurations)

            def build_unconfig(self, apply=True, attributes=None, **kwargs):
                """Negate the configuration built by build_config (unconfig=True)."""
                return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class MacAttributes(ABC):
    """MAC-table attributes of a single attachment circuit.

    This is the interface-level `mac` sub-mode (config-l2vpn-bg-bd-ac-mac);
    no attributes are rendered yet — the sub-mode is cancelled when empty.
    """

    def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):
        """Build the `mac` sub-mode CLI; currently a placeholder emitting nothing."""
        assert not apply
        assert not kwargs, kwargs
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)
        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac (config-l2vpn-bg-bd-ac-mac)
        with configurations.submode_context('mac', cancel_empty=True):
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging (config-l2vpn-bg-bd-ac-mac-aging)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / time 300
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / type absolute
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / aging / type inactivity
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / learning
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / learning disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit (config-l2vpn-bg-bd-ac-mac-limit)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action flood
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action no-flood
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / action shutdown
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / maximum 1
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification both
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification syslog
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / limit / notification trap
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / port-down flush
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / port-down flush disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure (config-l2vpn-bg-bd-ac-mac-secure)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action restrict
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / action shutdown
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / logging
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 / mac / secure / logging disable
            pass
        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Negate the configuration built by build_config (unconfig=True)."""
        return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class NeighborAttributes(ABC):
    """Pseudowire-neighbor attributes under the bridge domain.

    Supports IPv4 pseudowire neighbors (a real sub-mode) and EVPN EVI
    neighbors (a single line, not a sub-mode).
    """

    def build_config(self, apply=True, attributes=None, unconfig=False, **kwargs):
        """Build the neighbor CLI as a string; apply must be False.

        Raises ValueError for neighbor types other than
        PseudowireIPv4Neighbor / PseudowireEviNeighbor.
        """
        assert not apply
        assert not kwargs, kwargs
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)
        nbr_ctx = None
        nbr_is_submode = True  # IPv4 pw neighbors open a sub-mode; EVPN neighbors do not
        if isinstance(self.neighbor, PseudowireIPv4Neighbor):
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw)
            assert self.ip is not None
            assert self.pw_id is not None
            nbr_ctx = attributes.format('neighbor {ip} pw-id {pw_id}', force=True)
        elif isinstance(self.neighbor, PseudowireEviNeighbor):
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn 1 target 1
            assert self.evi is not None
            assert self.ac_id is not None
            nbr_ctx = attributes.format('neighbor evpn {evi.evi_id} target {ac_id}', force=True)
            nbr_is_submode = False
        else:
            raise ValueError(self.neighbor)
        if not nbr_is_submode:
            configurations.append_line(nbr_ctx)
        else:
            with configurations.submode_context(nbr_ctx):
                if unconfig and attributes.iswildcard:
                    configurations.submode_unconfig()
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / backup neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw-backup)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / backup neighbor 1.2.3.4 pw-id 1 / pw-class someword3
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / dhcp ipv4 none
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / dhcp ipv4 snoop profile someword3
                # False explicitly renders `dhcp ipv4 none`; a string renders a snoop profile.
                v = attributes.value('dhcp_ipv4_snooping_profile')
                if v is not None:
                    if v is False:
                        configurations.append_line('dhcp ipv4 none')
                    else:
                        configurations.append_line('dhcp ipv4 snoop profile {}'.format(v))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding unknown-unicast
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / flooding unknown-unicast disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / igmp snooping profile someword3
                v = attributes.value('igmp_snooping_profile')
                if v is not None:
                    if v is False:
                        pass  # unlike dhcp, False has no explicit `none` form here
                    else:
                        configurations.append_line('igmp snooping profile {}'.format(v))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac (config-l2vpn-bg-bd-pw-mac)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging (config-l2vpn-bg-bd-pw-mac-aging)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / time 300
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / type absolute
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / aging / type inactivity
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / learning
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / learning disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit (config-l2vpn-bg-bd-pw-mac-limit)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action flood
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action no-flood
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action none
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / action shutdown
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / maximum 1
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification both
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification none
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification syslog
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / limit / notification trap
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / port-down flush
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / port-down flush disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure (config-l2vpn-bg-bd-pw-mac-secure)
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action none
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action restrict
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / action shutdown
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / logging
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mac / secure / logging disable
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mld snooping profile someword3
                v = attributes.value('mld_snooping_profile')
                if v is not None:
                    if v is False:
                        pass
                    else:
                        configurations.append_line('mld snooping profile {}'.format(v))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / mpls static label local 16 remote 16
                # NOTE(review): our `local` label is read from the REMOTE neighbor's
                # mpls_static_label — presumably deliberate pseudowire symmetry
                # (each side's local label is the peer's configured label); confirm.
                remote_label = attributes.value('mpls_static_label')
                if remote_label is not None:
                    local_label = self.parent.neighbor_attr[self.remote_neighbor].mpls_static_label
                    if local_label is None:
                        warnings.warn(
                            'remote neighbor {!r} mpls_static_label missing'.format(self.remote_neighbor),
                            UnsupportedAttributeWarning)
                    else:
                        configurations.append_line('mpls static label local {} remote {}'.\
                                                   format(local_label, remote_label))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / pw-class someword3
                v = attributes.value('pw_class')
                if v is not None:
                    configurations.append_line('pw-class {}'.\
                                               format(v.device_attr[self.device].name))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / split-horizon group
                if attributes.value('split_horizon'):
                    configurations.append_line('split-horizon group')
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / static-mac-address aaaa.bbbb.cccc
                configurations.append_line(attributes.format('static-mac-address {static_mac_address}'))
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control broadcast kbps 64
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control broadcast pps 1
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control multicast kbps 64
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control multicast pps 1
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control unknown-unicast kbps 64
                # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 / storm-control unknown-unicast pps 1
        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Negate the configuration built by build_config (unconfig=True)."""
        return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class EviAttributes(ABC):
    """EVPN EVI attributes under the bridge domain."""

    def build_config(self, apply=True, attributes=None, unconfig=False,
                     **kwargs):
        """Build the `evi <id>` CLI as a string; apply must be False."""
        assert not apply
        assert not kwargs, kwargs
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)
        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / evi 1 (config-l2vpn-bg-bd-evi)
        with configurations.submode_context(
                attributes.format('evi {evi_id}', force=True),
                exit_cmd=''):  # evi is not a sub-mode in all releases.
            if unconfig and attributes.iswildcard:
                configurations.submode_unconfig()
        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Negate the configuration built by build_config (unconfig=True)."""
        return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class VniAttributes(ABC):
    """VXLAN VNI membership attributes under the bridge domain."""

    def build_config(self, apply=True, attributes=None, unconfig=False,
                     **kwargs):
        """Build the `member vni <id>` CLI as a string; apply must be False."""
        assert not apply
        assert not kwargs, kwargs
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)
        # iosxr: l2vpn / bridge group someword (config-l2vpn-bg)
        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)
        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / member vni 1 (config-l2vpn-bg-bd-vni)
        with configurations.submode_context(attributes.format('member vni {vni_id}', force=True)):
            if unconfig and attributes.iswildcard:
                configurations.submode_unconfig()
        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Negate the configuration built by build_config (unconfig=True)."""
        return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class MacAttributes(ABC):
    """Bridge-domain-level MAC table attributes (config-l2vpn-bg-bd-mac).

    Currently renders only `aging / time` and `learning disable`; the
    remaining commands are documented but not yet implemented.
    """

    def build_config(self, apply=True, attributes=None, unconfig=False,
                     **kwargs):
        """Build the bridge-domain `mac` sub-mode CLI; apply must be False."""
        assert not apply
        assert not kwargs, kwargs
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)
        # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac (config-l2vpn-bg-bd-mac)
        with configurations.submode_context('mac', cancel_empty=True):
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging (config-l2vpn-bg-bd-mac-aging)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / time 300
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / type absolute
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / aging / type inactivity
            with configurations.submode_context('aging',cancel_empty=True):
                configurations.append_line(attributes.format('time {aging_time}'))
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / learning
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / learning disable
            v = attributes.value('learning_disable')
            if v is True:
                configurations.append_line('learning disable')
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit (config-l2vpn-bg-bd-mac-limit)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action flood
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action no-flood
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / action shutdown
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / maximum 1
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification both
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification syslog
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / limit / notification trap
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / port-down flush
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / port-down flush disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure (config-l2vpn-bg-bd-mac-secure)
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action none
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action restrict
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / action shutdown
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / logging
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / secure / logging disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / static-address aaaa.bbbb.cccc drop
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw access-pw disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw disable
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw optimize
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw relay
            # iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac / withdraw state-down
            pass
        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Negate the configuration built by build_config (unconfig=True)."""
        return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
def build_config(self, apply=True, attributes=None, unconfig=False,
contained=False, **kwargs):
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
# iosxr: l2vpn (config-l2vpn)
submode_stack = contextlib.ExitStack()
if not contained:
submode_stack.enter_context(
configurations.submode_context('l2vpn'))
# iosxr: l2vpn / bridge group someword (config-l2vpn-bg)
with configurations.submode_context(attributes.format('bridge group {group_name}', force=True, cancel_empty=True)):
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 (config-l2vpn-bg-bd)
with configurations.submode_context(attributes.format('bridge-domain {name}', force=True)):
if unconfig and attributes.iswildcard:
configurations.submode_unconfig()
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / coupled-mode
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dhcp ipv4 snoop profile someword3
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection (config-l2vpn-bg-bd-dai)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation (config-l2vpn-bg-bd-dai-av)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / dst-mac
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / ipv4
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / address-validation / src-mac
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / dynamic-arp-inspection / logging
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / evi 1 (config-l2vpn-bg-bd-evi)
for sub, attributes2 in attributes.mapping_values('evi_attr', keys=self.evis, sort=True):
configurations.append_block(
sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / flooding disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / flooding unknown-unicast disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / igmp snooping disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / igmp snooping profile someword3
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / routed interface BVI1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / interface Bundle-Ether1 (config-l2vpn-bg-bd-ac)
for sub, attributes2 in attributes.mapping_values('interface_attr', keys=self.interfaces, sort=True):
configurations.append_block(
sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / ip-source-guard (config-l2vpn-bg-bd-ipsg)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / ip-source-guard / logging
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mac (config-l2vpn-bg-bd-mac)
ns, attributes2 = attributes.namespace('mac')
if ns is not None:
configurations.append_block(
ns.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / member vni 1 (config-l2vpn-bg-bd-vni)
for sub, attributes2 in attributes.mapping_values('vni_attr', keys=self.vnis, sort=True):
configurations.append_block(
sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mld snooping profile someword3
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / mtu 100
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor 1.2.3.4 pw-id 1 (config-l2vpn-bg-bd-pw)
for sub, attributes2 in attributes.mapping_values('neighbor_attr', keys=self.pseudowire_neighbors, sort=True):
configurations.append_block(
sub.build_config(apply=False, attributes=attributes2, unconfig=unconfig))
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn evi 1 target 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / neighbor evpn evi 1 target 1 source 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / nv satellite (config-l2vpn-bg-bd-nv)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / nv satellite / offload ipv4 multicast enable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core (config-l2vpn-bg-bd-pbb-core)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / evi 1 (config-l2vpn-bg-bd-pbb-core-evi)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac (config-l2vpn-bg-bd-pbb-core-mac)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging (config-l2vpn-bg-bd-pbb-core-mac-aging)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / time 300
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / type absolute
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / aging / type inactivity
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / learning
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mac / learning disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / mmrp-flood-optimization
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb core / rewrite ingress tag push dot1ad 1 symmetric
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 (config-l2vpn-bg-bd-pbb-edge)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / dhcp ipv4 none
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / dhcp ipv4 snoop profile someword4
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / igmp snooping profile someword4
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac (config-l2vpn-bg-bd-pbb-edge-mac)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging (config-l2vpn-bg-bd-pbb-edge-mac-aging)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / time 300
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / type absolute
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / aging / type inactivity
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / learning
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / learning disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit (config-l2vpn-bg-bd-pbb-edge-mac-limit)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action flood
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action no-flood
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action none
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / action shutdown
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / maximum 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification both
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification none
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification syslog
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / limit / notification trap
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure (config-l2vpn-bg-bd-pbb-edge-mac-sec)
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / accept-shutdown
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action none
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action restrict
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / action shutdown
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / logging
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / mac / secure / logging disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / split-horizon group vfi disable
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / static-mac-address aaaa.bbbb.cccc bmac aaaa.bbbb.cccc
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / pbb edge i-sid 256 core-bridge someword3 / unknown-unicast-bmac aaaa.bbbb.cccc
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / shutdown
if attributes.value('shutdown'):
configurations.append_line('shutdown')
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control broadcast kbps 64
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control broadcast pps 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control multicast kbps 64
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control multicast pps 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control unknown-unicast kbps 64
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / storm-control unknown-unicast pps 1
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / transport-mode vlan passthrough
# iosxr: l2vpn / bridge group someword / bridge-domain someword2 / vfi someword3 (config-l2vpn-bg-bd-vfi)
for vfi, attributes2 in attributes.sequence_values('vfis'):
configurations.append_block(
str(vfi.build_config(apply=False, attributes=attributes2, unconfig=unconfig)))
submode_stack.close()
if apply:
if configurations:
self.device.configure(str(configurations), fail_invalid=True)
else:
return CliConfig(device=self.device, unconfig=unconfig,
cli_config=configurations, fail_invalid=True)
def build_unconfig(self, apply=True, attributes=None, **kwargs):
    """Remove this configuration by delegating to ``build_config`` with
    ``unconfig=True``.

    Parameters mirror ``build_config``: ``apply`` immediately pushes the
    CLI to the device, ``attributes`` restricts which attributes are
    unconfigured, and any extra keyword arguments are forwarded unchanged.
    """
    return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
| 85.039623
| 191
| 0.609305
| 4,978
| 45,071
| 5.487344
| 0.049217
| 0.0842
| 0.134134
| 0.176051
| 0.901779
| 0.88882
| 0.863889
| 0.845439
| 0.835847
| 0.830795
| 0
| 0.0342
| 0.312973
| 45,071
| 529
| 192
| 85.200378
| 0.847957
| 0.589315
| 0
| 0.555024
| 0
| 0
| 0.044492
| 0.003886
| 0
| 0
| 0
| 0
| 0.08134
| 1
| 0.066986
| false
| 0.023923
| 0.038278
| 0.033493
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c51bdefc6771c3e53ff788f0a5919bc9fab9f8a
| 5,742
|
py
|
Python
|
src/templates.py
|
HongzoengNg/UqerCodeGenerator
|
c54dc8840c7c110eabadb66b10a7dc78060ee13d
|
[
"MIT"
] | null | null | null |
src/templates.py
|
HongzoengNg/UqerCodeGenerator
|
c54dc8840c7c110eabadb66b10a7dc78060ee13d
|
[
"MIT"
] | null | null | null |
src/templates.py
|
HongzoengNg/UqerCodeGenerator
|
c54dc8840c7c110eabadb66b10a7dc78060ee13d
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
'''
File: templates.py
File Created: Tuesday, 12th February 2019
Author: Hongzoeng Ng (kenecho@hku.hk)
-----
Last Modified: Tuesday, 12th February 2019
Modified By: Hongzoeng Ng (kenecho@hku.hk)
-----
Copyright @ 2018 KenEcho
'''
# Uqer back-test template: dual moving-average crossover strategy.
# ``%``-placeholders, in order: start date, end date, universe,
# max_history_window, asset-allocation dict, short-window length.
# NOTE(review): indentation inside this template appears to have been
# lost in transit — verify against the original file before rendering.
moving_average_code = """# Moving average strategy
start = '%s'
end = '%s'
universe = %s
benchmark = "HS300"
freq = "d"
refresh_rate = 1
max_history_window = %s
accounts = {
"security_account": AccountConfig(
account_type="security",
capital_base=10000000,
commission=Commission(buycost=0.00, sellcost=0.00, unit="perValue"),
slippage=Slippage(value=0.00, unit="perValue")
)
}
def initialize(context):
context.asset_allocation = %s
def handle_data(context):
security_account = context.get_account("security_account")
current_universe = context.get_universe("stock", exclude_halt=False)
hist = context.get_attribute_history(
attribute="closePrice", time_range=max_history_window, style="sat"
)
for stk in current_universe:
short_ma = hist[stk][-%s:].mean()
long_ma = hist[stk][:].mean()
if (short_ma > long_ma) and (stk not in security_account.get_positions()):
security_account.order_pct_to(stk, context.asset_allocation[stk])
elif short_ma <= long_ma and (stk in security_account.get_positions()):
security_account.order_to(stk, 0)
"""
# Uqer back-test template: MACD crossover strategy (uses talib.MACD).
# ``%``-placeholders, in order: start date, end date, universe,
# max_history_window, asset-allocation dict, short_win, long_win, macd_win.
# NOTE(review): indentation inside this template appears to have been
# lost in transit — verify against the original file before rendering.
macd_code = """# MACD Strategy
import pandas as pd
import numpy as np
import talib
start = '%s'
end = '%s'
universe = %s
benchmark = 'HS300'
freq = 'd'
refresh_rate = 1
max_history_window = %s
accounts = {
'security_account': AccountConfig(
account_type='security',
capital_base=10000000,
commission = Commission(buycost=0.00, sellcost=0.00, unit='perValue'),
slippage = Slippage(value=0.00, unit='perValue')
)
}
def initialize(context):
context.asset_allocation = %s
context.short_win = %s # default to be 12
context.long_win = %s # default to be 26
context.macd_win = %s # default to be 9
def handle_data(context):
security_account = context.get_account('security_account')
current_universe = context.get_universe('stock', exclude_halt=False)
hist = context.get_attribute_history(
attribute='closePrice', time_range=max_history_window, style='sat'
)
for stk in current_universe:
prices = hist[stk].values
macd, signal, macdhist = talib.MACD(
prices,
fastperiod=context.short_win,
slowperiod=context.long_win,
signalperiod=context.macd_win
)
if (macd[-1] - signal[-1] > 0) and (stk not in security_account.get_positions()):
security_account.order_pct_to(stk, context.asset_allocation[stk])
elif (macd[-1] - signal[-1] < 0) and (stk in security_account.get_positions()):
security_account.order_to(stk, 0)
"""
# Uqer back-test template: stochastic-oscillator strategy (uses ta.STOCH).
# ``%``-placeholders, in order: start date, end date, universe,
# max_history_window, asset-allocation dict, fastk, slowk, slowd periods.
# NOTE(review): indentation inside this template appears to have been
# lost in transit — verify against the original file before rendering.
stochastic_oscillator_code = """# Stochastic oscillator
import pandas as pd
import numpy as np
import talib as ta
start = '%s'
end = '%s'
universe = %s
benchmark = 'HS300'
freq = 'd'
refresh_rate = 1
max_history_window = %s
accounts = {
'security_account': AccountConfig(
account_type='security',
capital_base=10000000,
commission = Commission(buycost=0.00, sellcost=0.00, unit='perValue'),
slippage = Slippage(value=0.00, unit='perValue')
)
}
def initialize(context):
context.asset_allocation = %s
context.fastk = %s # default to be 14
context.slowk = %s # default to be 3
context.slowd = %s # default to be 3
def handle_data(context):
security_account = context.get_account('security_account')
current_universe = context.get_universe('stock', exclude_halt=False)
hist_close = context.get_attribute_history(
attribute='closePrice', time_range=max_history_window, style='sat'
)
hist_high = context.get_attribute_history(
attribute='highPrice', time_range=max_history_window, style='sat'
)
hist_low = context.get_attribute_history(
attribute='lowPrice', time_range=max_history_window, style='sat'
)
for stk in current_universe:
close = hist_close[stk].values
high = hist_high[stk].values
low = hist_low[stk].values
slowk, slowd = ta.STOCH(
high, low, close,
fastk_period=context.fastk,
slowk_period=context.slowk,
slowk_matype=0,
slowd_period=context.slowd,
slowd_matype=0
)
if (slowk[-1] > slowd[-1]) and (stk not in security_account.get_positions()):
security_account.order_pct_to(stk, context.asset_allocation[stk])
elif (slowk[-1] < slowd[-1]) and (stk in security_account.get_positions()):
security_account.order_to(stk, 0)
"""
# Uqer back-test template: buy-and-hold strategy.
# ``%``-placeholders, in order: start date, end date, universe,
# asset-allocation dict.
# NOTE(review): indentation inside this template appears to have been
# lost in transit — verify against the original file before rendering.
buy_hold_code = """# Buy & Hold strategy
start = '%s'
end = '%s'
universe = %s
benchmark = "HS300"
freq = "d"
refresh_rate = 1
accounts = {
"security_account": AccountConfig(
account_type="security",
capital_base=10000000,
commission=Commission(buycost=0.00, sellcost=0.00, unit="perValue"),
slippage=Slippage(value=0.00, unit="perValue")
)
}
def initialize(context):
context.asset_allocation = %s
def handle_data(context):
security_account = context.get_account("security_account")
current_universe = context.get_universe("stock", exclude_halt=False)
for stk in current_universe:
if stk not in security_account.get_positions():
security_account.order_pct_to(stk, context.asset_allocation[stk])
"""
| 29.295918
| 89
| 0.65465
| 711
| 5,742
| 5.077356
| 0.184248
| 0.108033
| 0.035457
| 0.033241
| 0.804432
| 0.74626
| 0.73241
| 0.725208
| 0.71385
| 0.71385
| 0
| 0.028204
| 0.228144
| 5,742
| 196
| 90
| 29.295918
| 0.786327
| 0.042494
| 0
| 0.618421
| 0
| 0.013158
| 0.979421
| 0.371335
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039474
| 0
| 0.039474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c992d7d3fdc3211525704cc81c4722461758c0c
| 32,989
|
py
|
Python
|
spyder_vim/tests/test_vim.py
|
bgallois/spyder-vim
|
9dd3c500571c07cebd713049d284b1a07d668352
|
[
"MIT"
] | null | null | null |
spyder_vim/tests/test_vim.py
|
bgallois/spyder-vim
|
9dd3c500571c07cebd713049d284b1a07d668352
|
[
"MIT"
] | null | null | null |
spyder_vim/tests/test_vim.py
|
bgallois/spyder-vim
|
9dd3c500571c07cebd713049d284b1a07d668352
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
#
"""Tests for the plugin."""
# Standard library imports
import os
import os.path as osp
# Test library imports
import pytest
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock # Python 2
# Qt imports
from qtpy.QtCore import Qt
from qtpy.QtGui import QTextCursor
from qtpy.QtWidgets import QWidget, QVBoxLayout, QApplication
# Spyder imports
from spyder.plugins.editor.widgets.editor import EditorStack
# Local imports
from spyder_vim.vim import Vim
from spyder_vim.vim_widget import RE_VIM_PREFIX
# Absolute path of this test module's directory; used below to place the
# temporary editor file ('foo.txt') next to the tests.
LOCATION = osp.realpath(osp.join(
    os.getcwd(), osp.dirname(__file__)))
class VimTesting(Vim):
    """Vim plugin subclass for tests: disables the on-disk config file."""

    CONF_FILE = False

    def __init__(self, parent):
        # Bug fix: the original spelled this ``__init`` (missing the
        # trailing underscores), so it was a dead method and the plugin
        # fell through to the inherited constructor. Behavior is the
        # same call, but now the override actually runs.
        Vim.__init__(self, parent)
class EditorMock(QWidget):
    """Mock of the Spyder editor plugin wrapping a single EditorStack."""

    def __init__(self, editor_stack):
        """Wrap *editor_stack* and stub out the file-menu actions."""
        QWidget.__init__(self, None)
        self.editor_stack = editor_stack
        self.editorsplitter = self.editor_stack
        # The plugin only triggers these actions; Mock records the calls.
        for action in ('open_action', 'new_action',
                       'save_action', 'close_action'):
            setattr(self, action, Mock())
        box = QVBoxLayout()
        box.addWidget(self.editor_stack)
        self.setLayout(box)

    def get_current_editorstack(self):
        """Return the wrapped EditorStack instance."""
        return self.editor_stack
class MainMock(QWidget):
    """Spyder MainWindow mock."""

    def __init__(self, editor_stack):
        """Main Window Mock constructor."""
        QWidget.__init__(self, None)
        self.plugin_focus_changed = Mock()
        self.editor = EditorMock(editor_stack)
        layout = QVBoxLayout()
        layout.addWidget(self.editor)
        self.setLayout(layout)

    # NOTE(review): placed as a class attribute — stubs the main-window
    # hook used during plugin registration. The flattened source makes the
    # original indentation ambiguous; confirm against upstream.
    add_dockwidget = Mock()
@pytest.fixture
def editor_bot(qtbot):
    """EditorStack pytest fixture.

    Builds an EditorStack holding a five-line buffer and wraps it in the
    MainWindow mock. Returns ``(main, editor_stack, editor, qtbot)``.
    """
    text = (' 123\n'
            'line 1\n'
            'line 2\n'
            'line 3\n'
            'line 4')  # a newline is added at end
    editor_stack = EditorStack(None, [])
    editor_stack.set_find_widget(Mock())
    editor_stack.set_io_actions(Mock(), Mock(), Mock(), Mock())
    finfo = editor_stack.new(osp.join(LOCATION, 'foo.txt'), 'utf-8', text)
    main = MainMock(editor_stack)
    # main.show()
    qtbot.addWidget(main)
    return main, editor_stack, finfo.editor, qtbot
@pytest.fixture
def vim_bot(editor_bot):
    """Create an spyder-vim plugin instance.

    Registers VimTesting on the mocked main window and returns
    ``(main, editor_stack, editor, vim, qtbot)``.
    """
    main, editor_stack, editor, qtbot = editor_bot
    vim = VimTesting(main)
    vim.register_plugin()
    return main, editor_stack, editor, vim, qtbot
def test_prefix_no_match():
    """An operator character alone ("d") is not a complete prefix."""
    assert RE_VIM_PREFIX.match("d") is None


def test_one_char():
    """A single-char command ("D") matches with empty repeat parts."""
    assert RE_VIM_PREFIX.match("D").groups() == ("", "D", "")


def test_two_chars_command():
    """A doubled operator ("dd") matches as one command."""
    assert RE_VIM_PREFIX.match("dd").groups() == ("", "dd", "")


def test_number_no_match():
    """A bare repeat count ("11") is not a full command."""
    assert RE_VIM_PREFIX.match("11") is None


def test_number_and_zero_no_match():
    """A count ending in zero ("10") is still only a count."""
    assert RE_VIM_PREFIX.match("10") is None


def test_two_chars_repeat():
    """Count plus command ("2D") splits into repeat and command."""
    assert RE_VIM_PREFIX.match("2D").groups() == ("2", "D", "")


def test_three_chars_repeat():
    """A multi-digit count ("21D") is captured whole."""
    assert RE_VIM_PREFIX.match("21D").groups() == ("21", "D", "")


def test_three_chars_with_zero_repeat():
    """A zero inside the count ("20D") is accepted."""
    assert RE_VIM_PREFIX.match("20D").groups() == ("20", "D", "")
def test_k_command(vim_bot):
    """Test k command (cursor moves up)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    cmd_line = vim.get_focus_widget()
    line, _ = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'k')
    new_line, _ = editor.get_cursor_line_column()
    assert new_line == line - 1


def test_arrowup_command(vim_bot):
    """Test Up arrow key (cursor moves up)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    cmd_line = vim.get_focus_widget()
    line, _ = editor.get_cursor_line_column()
    qtbot.keyPress(editor, Qt.Key_Up)
    new_line, _ = editor.get_cursor_line_column()
    assert new_line == line - 1


def test_h_command(vim_bot):
    """Test h command (cursor moves to the left)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'h')
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col - 1


def test_j_command(vim_bot):
    """Test j command (cursor moves down)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.stdkey_up(True)
    cmd_line = vim.get_focus_widget()
    line, _ = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'j')
    new_line, _ = editor.get_cursor_line_column()
    assert new_line == line + 1


def test_arrowdown_command(vim_bot):
    """Test Down arrow key (cursor moves down)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.stdkey_up(True)
    cmd_line = vim.get_focus_widget()
    line, _ = editor.get_cursor_line_column()
    qtbot.keyPress(editor, Qt.Key_Down)
    new_line, _ = editor.get_cursor_line_column()
    assert new_line == line + 1


def test_l_shortchut(vim_bot):
    """Test l command (cursor moves right)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    qtbot.keyPress(editor, Qt.Key_Left)
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'l')
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col + 1


def test_arrowright_shortchut(vim_bot):
    """Test Right arrow key (cursor moves right)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    qtbot.keyPress(editor, Qt.Key_Left)
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyPress(editor, Qt.Key_Right)
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col + 1


def test_arrowleft_shortchut(vim_bot):
    """Test Left arrow key (cursor moves left)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyPress(editor, Qt.Key_Left)
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col - 1
def test_w_shortchut(vim_bot):
    """Test w command (cursor moves to the next word)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.moveCursor(QTextCursor.PreviousWord, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Left)
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'w')
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col + 1


def test_b_shortchut(vim_bot):
    """Test b command (cursor moves to the previous word)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    qtbot.keyPress(editor, Qt.Key_Left)
    editor.moveCursor(QTextCursor.NextWord, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    _, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'b')
    _, new_col = editor.get_cursor_line_column()
    assert new_col == col - 1


def test_f_shortchut(vim_bot):
    """Cursor moves to the next occurrence of a character."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.go_to_line(2)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'fe')
    qtbot.keyClicks(cmd_line, 'i')
    new_line, new_col = editor.get_cursor_line_column()
    print(line, col)
    print(new_line, new_col)
    assert new_col == col + len('lin')


def test_uppercase_f_shortchut(vim_bot):
    """Cursor moves to the previous occurrence of a character."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.go_to_line(2)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'Fi')
    qtbot.keyClicks(cmd_line, 'i')
    new_line, new_col = editor.get_cursor_line_column()
    print(line, col)
    print(new_line, new_col)
    assert new_col == col - 1
def test_space_command(vim_bot):
    """Cursor moves to the right."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(4)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, ' ')
    new_line, new_col = editor.get_cursor_line_column()
    print(line, col)
    print(new_line, new_col)
    assert new_col == col + 1


def test_backspace_command(vim_bot):
    """Cursor moves to the left."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(4)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, '\b')
    new_line, new_col = editor.get_cursor_line_column()
    print(line, col)
    print(new_line, new_col)
    assert new_col == col - 1


def test_return_command(vim_bot):
    """Move to the start of the next line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(2)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, _ = editor.get_cursor_line_column()
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    new_line, _ = editor.get_cursor_line_column()
    # print(line, col)
    # print(new_line, new_col)
    assert new_line == line + 1


def test_dollar_command(vim_bot):
    """Go to the end of the current line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, '$')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col + len('line 2')
def test_zero_command(vim_bot):
    """Go to the start of the current line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(4)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, '0')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col - len('line 3')


def test_caret_command(vim_bot):
    """Go to the first non-blank character of the line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(1)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, '^')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col - len('123')


def test_uppercase_g_command(vim_bot):
    """Go to the first non-blank character of the last line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(1)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'G')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col and new_line == line + 4


def test_gg_command(vim_bot):
    """Go to the first position of the first line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(1)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'gg')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == 0 and new_line == 0
def test_uppercase_i_command(vim_bot):
    """Insert text before the first non-blank in the line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(4)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'I')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == 0


def test_a_command(vim_bot):
    """Append text after the cursor."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(2)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'a')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col + 1


def test_uppercase_a_command(vim_bot):
    """Append text at the end of the line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'A')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col + len('line 2')


def test_o_command(vim_bot):
    """Begin a new line below the cursor and insert text."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(2)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'o')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_line == line + 1 and new_col == 0


def test_uppercase_O_command(vim_bot):
    """Begin a new line above the cursor and insert text."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'O')
    new_line, new_col = editor.get_cursor_line_column()
    assert new_line == line and new_col == 0
def test_u_command(vim_bot):
    """Undo changes."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    # Type text directly into the editor, then undo it via the vim 'u' key.
    qtbot.keyClicks(editor, 'spam')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    line, col = editor.get_cursor_line_column()
    qtbot.keyClicks(cmd_line, 'u')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col - len('spam')


def test_d_command(vim_bot):
    """Delete selection."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    lines, cols = editor.get_cursor_line_column()
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    # Visual-select two characters, then delete them.
    qtbot.keyPress(cmd_line, 'v')
    qtbot.keyPress(cmd_line, 'l')
    qtbot.keyPress(cmd_line, 'l')
    qtbot.keyClicks(cmd_line, 'd')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_lines, new_cols = editor.get_cursor_line_column()
    assert new_cols == cols - 2


def test_dd_command(vim_bot):
    """Delete line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    num_lines = editor.get_line_count()
    qtbot.keyClicks(cmd_line, 'dd')
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_num_lines = editor.get_line_count()
    assert new_num_lines == num_lines - 1


def test_uppercase_d_command(vim_bot):
    """Delete from the cursor to the end of the line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    line, col = editor.get_cursor_line_column()
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'D')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == col - len('ne 2')
def test_dw_command(vim_bot):
    """Cut words."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    line, col = editor.get_cursor_line_column()
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'dw')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == 2


def test_cw_command(vim_bot):
    """Cut words and edit."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    line, col = editor.get_cursor_line_column()
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'cw')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == 2


def test_x_command(vim_bot):
    """Delete the character under the cursor with delete from EndOfLine."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(1)
    # editor.stdkey_up(True)
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    line, col = editor.get_cursor_line_column()
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'x')
    editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    new_line, new_col = editor.get_cursor_line_column()
    assert new_col == 5
def test_y_command(vim_bot):
    """Copy selected text on visual mode."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'v')
    qtbot.keyClicks(cmd_line, '3l')
    qtbot.keyClicks(cmd_line, 'y')
    clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert clipboard == 'lin'


def test_yy_command(vim_bot):
    """Copy current line (visual line mode)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'V')
    qtbot.keyClicks(cmd_line, 'yy')
    clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert clipboard[:-1] == 'line 2'


def test_yy_no_visual_command(vim_bot):
    """Copy current line (without entering visual mode first)."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    # qtbot.keyClicks(cmd_line, 'V')
    qtbot.keyClicks(cmd_line, 'yy')
    clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert clipboard[:-1] == 'line 2'


def test_yw_command(vim_bot):
    """Copy word."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    cmd_line = vim.get_focus_widget()
    # qtbot.keyClicks(cmd_line, 'v')
    qtbot.keyClicks(cmd_line, 'yw')
    clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert clipboard == 'line'


def test_ydollar_command(vim_bot):
    """Copy until end of line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    # qtbot.keyClicks(cmd_line, 'v')
    qtbot.keyClicks(cmd_line, 'y$')
    clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert clipboard == 'ne 2'
def test_p_command_char_mode(vim_bot):
    """Paste characters after cursor."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(2)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    # Character-wise visual selection spanning two lines up, yank, paste.
    qtbot.keyClicks(cmd_line, 'v')
    qtbot.keyClicks(cmd_line, '2k')
    qtbot.keyClicks(cmd_line, 'y')
    qtbot.keyClicks(cmd_line, 'p')
    text = editor.toPlainText()
    expected_text = (' 123\n'
                     'l 123\n'
                     'liine 1\n'
                     'line 2\n'
                     'line 3\n'
                     'line 4')
    # clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert text == expected_text


def test_p_command_line_mode(vim_bot):
    """Paste line below current line."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    qtbot.keyPress(editor, Qt.Key_Right)
    qtbot.keyPress(editor, Qt.Key_Right)
    cmd_line = vim.get_focus_widget()
    # Line-wise visual selection, yank, paste duplicates 'line 2'.
    qtbot.keyClicks(cmd_line, 'V')
    qtbot.keyClicks(cmd_line, '2h')
    qtbot.keyClicks(cmd_line, 'y')
    qtbot.keyClicks(cmd_line, 'p')
    text = editor.toPlainText()
    expected_text = (' 123\n'
                     'line 1\n'
                     'line 2\n'
                     'line 2\n'
                     'line 3\n'
                     'line 4')
    # clipboard = QApplication.clipboard().text()
    # editor.moveCursor(QTextCursor.EndOfLine, QTextCursor.KeepAnchor)
    # new_line, new_col = editor.get_cursor_line_column()
    assert text == expected_text
def test_zz_command(vim_bot):
    """Save and close file."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, 'ZZ')
    # ZZ must trigger both the save and the close editor actions exactly once.
    main.editor.close_action.trigger.assert_called_once_with()
    main.editor.save_action.trigger.assert_called_once_with()


def test_w_command(vim_bot):
    """Save file via :w."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, ':w')
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.save_action.trigger.assert_called_once_with()


def test_q_command(vim_bot):
    """Close file via :q."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, ':q')
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.close_action.trigger.assert_called_once_with()


def test_wq_command(vim_bot):
    """Save and close file via :wq."""
    main, editor_stack, editor, vim, qtbot = vim_bot
    editor.stdkey_backspace()
    editor.go_to_line(3)
    cmd_line = vim.get_focus_widget()
    qtbot.keyClicks(cmd_line, ':wq')
    qtbot.keyPress(cmd_line, Qt.Key_Return)
    main.editor.close_action.trigger.assert_called_once_with()
    main.editor.save_action.trigger.assert_called_once_with()
def test_e_command_no_args(vim_bot):
    """Bare :e reloads the file, leaving the buffer text unchanged."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    text_before = code_editor.toPlainText()
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    command_line = vim_plugin.get_focus_widget()
    bot.keyClicks(command_line, ':e')
    bot.keyPress(command_line, Qt.Key_Return)
    assert code_editor.toPlainText() == text_before
def test_e_command_args(vim_bot):
    """:e with a path argument triggers the editor's open action."""
    main_window, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    command_line = vim_plugin.get_focus_widget()
    bot.keyClicks(command_line, ':e .')
    bot.keyPress(command_line, Qt.Key_Return)
    main_window.editor.open_action.trigger.assert_called_once_with()
def test_colon_number_command(vim_bot):
    """:<n> jumps the cursor to line n (zero-based internally)."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    command_line = vim_plugin.get_focus_widget()
    bot.keyClicks(command_line, ':1')
    bot.keyPress(command_line, Qt.Key_Return)
    cursor_line, _ = code_editor.get_cursor_line_column()
    assert cursor_line == 0
def test_h_command_char_mode(vim_bot):
    """v2h yanks a character-wise selection extended two columns left."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('v', '2h', 'y'):
        bot.keyClicks(command_line, keystrokes)
    assert QApplication.clipboard().text() == 'li'
def test_j_command_char_mode(vim_bot):
    """v2j yanks a character-wise selection spanning two lines down."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('v', '2j', 'y'):
        bot.keyClicks(command_line, keystrokes)
    # \u2029 is the paragraph separator Qt uses between document lines.
    assert QApplication.clipboard().text() == 'ne 2\u2029line 3\u2029li'
def test_j_command_line_mode(vim_bot):
    """V2j yanks a line-wise selection extended two lines down."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('V', '2j', 'y'):
        bot.keyClicks(command_line, keystrokes)
    # \u2029 is the paragraph separator Qt uses between document lines.
    assert QApplication.clipboard().text() == ' 123\u2029line 1\u2029'
def test_k_command_line_mode(vim_bot):
    """V2k yanks a line-wise selection extended two lines up."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('V', '2k', 'y'):
        bot.keyClicks(command_line, keystrokes)
    # \u2029 is the paragraph separator Qt uses between document lines.
    assert QApplication.clipboard().text() == ' 123\u2029line 1\u2029line 2\u2029'
def test_gg_command_line_mode(vim_bot):
    """Vgg yanks a line-wise selection up to the first line."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('V', 'gg', 'y'):
        bot.keyClicks(command_line, keystrokes)
    # \u2029 is the paragraph separator Qt uses between document lines.
    assert QApplication.clipboard().text() == ' 123\u2029line 1\u2029line 2\u2029'
def test_gg_command_char_mode(vim_bot):
    """vgg yanks a character-wise selection up to the first character."""
    _, _, code_editor, vim_plugin, bot = vim_bot
    code_editor.stdkey_backspace()
    code_editor.go_to_line(3)
    code_editor.moveCursor(QTextCursor.StartOfLine, QTextCursor.KeepAnchor)
    bot.keyPress(code_editor, Qt.Key_Right)
    bot.keyPress(code_editor, Qt.Key_Right)
    command_line = vim_plugin.get_focus_widget()
    for keystrokes in ('v', 'gg', 'y'):
        bot.keyClicks(command_line, keystrokes)
    # \u2029 is the paragraph separator Qt uses between document lines.
    assert QApplication.clipboard().text() == ' 123\u2029line 1\u2029li'
| 34.872093
| 76
| 0.706114
| 4,495
| 32,989
| 4.901446
| 0.063404
| 0.042892
| 0.051743
| 0.065541
| 0.865241
| 0.847313
| 0.822803
| 0.789715
| 0.781273
| 0.775917
| 0
| 0.007594
| 0.177665
| 32,989
| 945
| 77
| 34.908995
| 0.804519
| 0.147928
| 0
| 0.708271
| 0
| 0
| 0.017293
| 0
| 0
| 0
| 0
| 0
| 0.093233
| 1
| 0.099248
| false
| 0
| 0.018045
| 0
| 0.129323
| 0.01203
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c718991f1f96003fd727ab4ee84c4b3f2ee3bbbb
| 3,971
|
py
|
Python
|
Code/Functions/costFunctions.py
|
jeroenvanbaar/ReciprocityMotives
|
9a6708f723a15e3cc614be9f438be42c6cc1a715
|
[
"MIT"
] | 1
|
2021-01-19T23:50:13.000Z
|
2021-01-19T23:50:13.000Z
|
Code/Functions/costFunctions.py
|
jeroenvanbaar/ReciprocityMotives
|
9a6708f723a15e3cc614be9f438be42c6cc1a715
|
[
"MIT"
] | null | null | null |
Code/Functions/costFunctions.py
|
jeroenvanbaar/ReciprocityMotives
|
9a6708f723a15e3cc614be9f438be42c6cc1a715
|
[
"MIT"
] | 1
|
2021-01-19T23:50:39.000Z
|
2021-01-19T23:50:39.000Z
|
import numpy as np
import pandas as pd
import choiceModels
def MP_costfun(param, subDat, printStep=False, printPredictions=False, resid_share=False):
    """Per-trial residuals of the MP choice model for one subject.

    Parameters
    ----------
    param : sequence of two floats, (theta, phi), passed to
        ``choiceModels.MP_model``.
    subDat : pandas.DataFrame with columns 'inv', 'mult', 'baseMult',
        'exp' and 'ret'. NOTE: mutated in place — a 'prediction'
        column is added/overwritten. Assumes a default 0..n-1 integer
        index (``.loc[trial, ...]`` is used) — TODO confirm callers.
    printStep : if True, print theta, phi and the SSE of this step.
    printPredictions : if True, print the DataFrame with predictions.
    resid_share : if True, express residuals as a share of the pot
        (inv * mult) instead of raw return units.

    Returns
    -------
    pandas.Series of float residuals (observed 'ret' minus prediction).
    """
    theta = param[0]
    phi = param[1]
    for trial in range(subDat.shape[0]):
        subDat.loc[trial, 'prediction'] = choiceModels.MP_model(
            subDat.loc[trial, 'inv'],
            subDat.loc[trial, 'mult'],
            subDat.loc[trial, 'baseMult'],
            subDat.loc[trial, 'exp'],
            theta, phi)
    residuals = subDat.loc[:, 'ret'] - subDat.loc[:, 'prediction']
    if resid_share:
        residuals = residuals / (subDat.loc[:, 'inv'] * subDat.loc[:, 'mult'])
    residuals = residuals.astype('float')
    if printStep:
        # SSE is only needed for the progress printout, so compute it lazily.
        SSE = np.sum(np.square(residuals))
        print('theta = %.2f, phi = %.2f, SSE = %.2f' % (theta, phi, SSE))
    if printPredictions:
        print(subDat)
    return residuals
def MP_costfun_ppSOE(param, subDat, printStep=False, printPredictions=False, resid_share=False):
    """Per-trial residuals of the MP model variant with ppSOE predictions.

    Identical to ``MP_costfun`` except the predictions come from
    ``choiceModels.MP_model_ppSOE``.

    Parameters
    ----------
    param : sequence of two floats, (theta, phi).
    subDat : pandas.DataFrame with columns 'inv', 'mult', 'baseMult',
        'exp' and 'ret'. NOTE: mutated in place — a 'prediction'
        column is added/overwritten. Assumes a default 0..n-1 integer
        index — TODO confirm callers.
    printStep : if True, print theta, phi and the SSE of this step.
    printPredictions : if True, print the DataFrame with predictions.
    resid_share : if True, express residuals as a share of the pot
        (inv * mult).

    Returns
    -------
    pandas.Series of float residuals (observed 'ret' minus prediction).
    """
    theta = param[0]
    phi = param[1]
    for trial in range(subDat.shape[0]):
        subDat.loc[trial, 'prediction'] = choiceModels.MP_model_ppSOE(
            subDat.loc[trial, 'inv'],
            subDat.loc[trial, 'mult'],
            subDat.loc[trial, 'baseMult'],
            subDat.loc[trial, 'exp'],
            theta, phi)
    residuals = subDat.loc[:, 'ret'] - subDat.loc[:, 'prediction']
    if resid_share:
        residuals = residuals / (subDat.loc[:, 'inv'] * subDat.loc[:, 'mult'])
    residuals = residuals.astype('float')
    if printStep:
        # SSE is only needed for the progress printout, so compute it lazily.
        SSE = np.sum(np.square(residuals))
        print('theta = %.2f, phi = %.2f, SSE = %.2f' % (theta, phi, SSE))
    if printPredictions:
        print(subDat)
    return residuals
def IA_costfun(theta, subDat, printStep=False, printPredictions=False, resid_share=False):
    """Per-trial residuals of the single-parameter IA choice model.

    Parameters
    ----------
    theta : float parameter passed to ``choiceModels.IA_model``.
    subDat : pandas.DataFrame with columns 'inv', 'mult' and 'ret'.
        NOTE: mutated in place — a 'prediction' column is
        added/overwritten. Assumes a default 0..n-1 integer index —
        TODO confirm callers.
    printStep : if True, print theta and the SSE of this step.
    printPredictions : if True, print the DataFrame with predictions.
    resid_share : if True, express residuals as a share of the pot
        (inv * mult).

    Returns
    -------
    pandas.Series of float residuals (observed 'ret' minus prediction).
    """
    for trial in range(subDat.shape[0]):
        subDat.loc[trial, 'prediction'] = choiceModels.IA_model(
            subDat.loc[trial, 'inv'],
            subDat.loc[trial, 'mult'],
            theta)
    residuals = subDat.loc[:, 'ret'] - subDat.loc[:, 'prediction']
    if resid_share:
        residuals = residuals / (subDat.loc[:, 'inv'] * subDat.loc[:, 'mult'])
    residuals = residuals.astype('float')
    if printStep:
        # SSE is only needed for the progress printout, so compute it lazily.
        SSE = np.sum(np.square(residuals))
        print('theta = %.2f, SSE = %.2f' % (theta, SSE))
    if printPredictions:
        print(subDat)
    return residuals
def GA_costfun(theta, subDat, printStep=False, printPredictions=False, resid_share=False):
    """Per-trial residuals of the single-parameter GA choice model.

    Parameters
    ----------
    theta : float parameter passed to ``choiceModels.GA_model``.
    subDat : pandas.DataFrame with columns 'inv', 'mult', 'baseMult',
        'exp' and 'ret'. NOTE: mutated in place — a 'prediction'
        column is added/overwritten. Assumes a default 0..n-1 integer
        index — TODO confirm callers.
    printStep : if True, print theta and the SSE of this step.
    printPredictions : if True, print the DataFrame with predictions.
    resid_share : if True, express residuals as a share of the pot
        (inv * mult).

    Returns
    -------
    pandas.Series of float residuals (observed 'ret' minus prediction).
    """
    for trial in range(subDat.shape[0]):
        subDat.loc[trial, 'prediction'] = choiceModels.GA_model(
            subDat.loc[trial, 'inv'],
            subDat.loc[trial, 'mult'],
            subDat.loc[trial, 'baseMult'],
            subDat.loc[trial, 'exp'],
            theta)
    residuals = subDat.loc[:, 'ret'] - subDat.loc[:, 'prediction']
    if resid_share:
        residuals = residuals / (subDat.loc[:, 'inv'] * subDat.loc[:, 'mult'])
    residuals = residuals.astype('float')
    if printStep:
        # SSE is only needed for the progress printout, so compute it lazily.
        SSE = np.sum(np.square(residuals))
        print('theta = %.2f, SSE = %.2f' % (theta, SSE))
    if printPredictions:
        print(subDat)
    return residuals
def GR_costfun(subDat, printPredictions=False, resid_share=False):
    """Per-trial residuals of the parameter-free GR baseline model.

    ``choiceModels.GR_model()`` takes no inputs, so every trial receives
    the same prediction.

    Parameters
    ----------
    subDat : pandas.DataFrame with columns 'inv', 'mult' and 'ret'.
        NOTE: mutated in place — a 'prediction' column is
        added/overwritten. Assumes a default 0..n-1 integer index —
        TODO confirm callers.
    printPredictions : if True, print the DataFrame with predictions.
    resid_share : if True, express residuals as a share of the pot
        (inv * mult).

    Returns
    -------
    pandas.Series of float residuals (observed 'ret' minus prediction).
    """
    for trial in range(subDat.shape[0]):
        subDat.loc[trial, 'prediction'] = choiceModels.GR_model()
    residuals = subDat.loc[:, 'ret'] - subDat.loc[:, 'prediction']
    if resid_share:
        residuals = residuals / (subDat.loc[:, 'inv'] * subDat.loc[:, 'mult'])
    residuals = residuals.astype('float')
    # The original computed an SSE here that was never printed or
    # returned; that dead computation has been removed.
    if printPredictions:
        print(subDat)
    return residuals
| 38.182692
| 115
| 0.607152
| 469
| 3,971
| 5.093817
| 0.10661
| 0.184596
| 0.111344
| 0.087903
| 0.953956
| 0.953956
| 0.953956
| 0.934701
| 0.934701
| 0.917957
| 0
| 0.006125
| 0.218837
| 3,971
| 103
| 116
| 38.553398
| 0.764023
| 0
| 0
| 0.860215
| 0
| 0
| 0.106019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053763
| false
| 0
| 0.032258
| 0
| 0.139785
| 0.247312
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c73d96fc5c89c85272cadb5b6e47f219beaca910
| 6,602
|
py
|
Python
|
tests/test_registration.py
|
khanhha/python-pcl
|
0968b20a3a068bd63d1b67f128c73484e7108d57
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_registration.py
|
khanhha/python-pcl
|
0968b20a3a068bd63d1b67f128c73484e7108d57
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_registration.py
|
khanhha/python-pcl
|
0968b20a3a068bd63d1b67f128c73484e7108d57
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
import numpy as np
from numpy import cos, sin
from numpy.testing import assert_equal
import unittest
import pcl
# from pcl.pcl_registration import icp, gicp, icp_nl
from pcl import IterativeClosestPoint, GeneralizedIterativeClosestPoint, IterativeClosestPointNonLinear
class TestICP(unittest.TestCase):
    """Iterative Closest Point should recover a mild rigid rotation."""

    def setUp(self):
        # Compose rotations about x, y and z into one transform and
        # build a random source cloud plus its rotated target.
        theta = [-.031, .4, .59]
        rot_x = [[1, 0, 0],
                 [0, cos(theta[0]), -sin(theta[0])],
                 [0, sin(theta[0]), cos(theta[0])]]
        rot_y = [[cos(theta[1]), 0, sin(theta[1])],
                 [0, 1, 0],
                 [-sin(theta[1]), 0, cos(theta[1])]]
        # Fixed: rot_z previously used theta[1] in its -sin and second-row
        # cos terms, which made the product non-orthogonal (not a true
        # rotation matrix).
        rot_z = [[cos(theta[2]), -sin(theta[2]), 0],
                 [sin(theta[2]), cos(theta[2]), 0],
                 [0, 0, 1]]
        transform = np.dot(rot_x, np.dot(rot_y, rot_z))
        source = np.random.RandomState(42).randn(900, 3)
        self.source = pcl.PointCloud(source.astype(np.float32))
        target = np.dot(source, transform)
        self.target = pcl.PointCloud(target.astype(np.float32))

    def testICP(self):
        """Run ICP and sanity-check the estimated 4x4 transform."""
        icp = self.source.make_IterativeClosestPoint()
        converged, transf, estimate, fitness = icp.icp(
            self.source, self.target, max_iter=1000)
        # assertTrue instead of `converged is True`: identity comparison
        # would fail for truthy non-bool values (e.g. numpy bools).
        self.assertTrue(converged)
        self.assertLess(fitness, .1)
        self.assertTrue(isinstance(transf, np.ndarray))
        self.assertEqual(transf.shape, (4, 4))
        # A rigid transform has a non-degenerate upper part and a fixed
        # homogeneous last row.
        self.assertFalse(np.any(transf[:3] == 0))
        assert_equal(transf[3], [0, 0, 0, 1])
class TestGICP(unittest.TestCase):
    """Generalized ICP should recover a mild rigid rotation."""

    def setUp(self):
        # Compose rotations about x, y and z into one transform and
        # build a random source cloud plus its rotated target.
        theta = [-.031, .4, .59]
        rot_x = [[1, 0, 0],
                 [0, cos(theta[0]), -sin(theta[0])],
                 [0, sin(theta[0]), cos(theta[0])]]
        rot_y = [[cos(theta[1]), 0, sin(theta[1])],
                 [0, 1, 0],
                 [-sin(theta[1]), 0, cos(theta[1])]]
        # Fixed: rot_z previously used theta[1] in its -sin and second-row
        # cos terms, which made the product non-orthogonal (not a true
        # rotation matrix).
        rot_z = [[cos(theta[2]), -sin(theta[2]), 0],
                 [sin(theta[2]), cos(theta[2]), 0],
                 [0, 0, 1]]
        transform = np.dot(rot_x, np.dot(rot_y, rot_z))
        source = np.random.RandomState(42).randn(900, 3)
        self.source = pcl.PointCloud(source.astype(np.float32))
        target = np.dot(source, transform)
        self.target = pcl.PointCloud(target.astype(np.float32))

    def testGICP(self):
        """Run GICP and sanity-check the estimated 4x4 transform."""
        gicp = self.source.make_GeneralizedIterativeClosestPoint()
        converged, transf, estimate, fitness = gicp.gicp(
            self.source, self.target, max_iter=1000)
        # assertTrue instead of `converged is True`: identity comparison
        # would fail for truthy non-bool values (e.g. numpy bools).
        self.assertTrue(converged)
        self.assertLess(fitness, .1)
        self.assertTrue(isinstance(transf, np.ndarray))
        self.assertEqual(transf.shape, (4, 4))
        # A rigid transform has a non-degenerate upper part and a fixed
        # homogeneous last row.
        self.assertFalse(np.any(transf[:3] == 0))
        assert_equal(transf[3], [0, 0, 0, 1])
class TestICP_NL(unittest.TestCase):
    """Non-linear ICP should recover a mild rigid rotation."""

    def setUp(self):
        # Compose rotations about x, y and z into one transform and
        # build a random source cloud plus its rotated target.
        theta = [-.031, .4, .59]
        rot_x = [[1, 0, 0],
                 [0, cos(theta[0]), -sin(theta[0])],
                 [0, sin(theta[0]), cos(theta[0])]]
        rot_y = [[cos(theta[1]), 0, sin(theta[1])],
                 [0, 1, 0],
                 [-sin(theta[1]), 0, cos(theta[1])]]
        # Fixed: rot_z previously used theta[1] in its -sin and second-row
        # cos terms, which made the product non-orthogonal (not a true
        # rotation matrix).
        rot_z = [[cos(theta[2]), -sin(theta[2]), 0],
                 [sin(theta[2]), cos(theta[2]), 0],
                 [0, 0, 1]]
        transform = np.dot(rot_x, np.dot(rot_y, rot_z))
        source = np.random.RandomState(42).randn(900, 3)
        self.source = pcl.PointCloud(source.astype(np.float32))
        target = np.dot(source, transform)
        self.target = pcl.PointCloud(target.astype(np.float32))

    def testICP_NL(self):
        """Run non-linear ICP and sanity-check the estimated transform."""
        icp_nl = self.source.make_IterativeClosestPointNonLinear()
        converged, transf, estimate, fitness = icp_nl.icp_nl(
            self.source, self.target, max_iter=1000)
        # assertTrue instead of `converged is True`: identity comparison
        # would fail for truthy non-bool values (e.g. numpy bools).
        self.assertTrue(converged)
        self.assertLess(fitness, .1)
        self.assertTrue(isinstance(transf, np.ndarray))
        self.assertEqual(transf.shape, (4, 4))
        # A rigid transform has a non-degenerate upper part and a fixed
        # homogeneous last row.
        self.assertFalse(np.any(transf[:3] == 0))
        assert_equal(transf[3], [0, 0, 0, 1])
def suite():
    """Build a TestSuite containing all ICP-variant test cases.

    Uses ``TestLoader.loadTestsFromTestCase`` because
    ``unittest.makeSuite`` was deprecated in Python 3.11 and removed in
    3.13. The local name also no longer shadows this function.
    """
    registration_suite = unittest.TestSuite()
    loader = unittest.defaultTestLoader
    for case in (TestICP, TestGICP, TestICP_NL):
        registration_suite.addTests(loader.loadTestsFromTestCase(case))
    return registration_suite
if __name__ == '__main__':
    # Run all tests in this module when executed directly.
    unittest.main()
| 40.503067
| 103
| 0.50515
| 762
| 6,602
| 4.304462
| 0.135171
| 0.012805
| 0.041159
| 0.027439
| 0.818293
| 0.77561
| 0.77561
| 0.77561
| 0.77561
| 0.77561
| 0
| 0.046122
| 0.339897
| 6,602
| 162
| 104
| 40.753086
| 0.706517
| 0.21342
| 0
| 0.709677
| 0
| 0
| 0.001552
| 0
| 0
| 0
| 0
| 0
| 0.204301
| 1
| 0.075269
| false
| 0
| 0.075269
| 0
| 0.193548
| 0.010753
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c78662a817927e9bb58a2146cefeab83e3a418e6
| 17,443
|
py
|
Python
|
tests/unit_tests/test_tethys_apps/test_management/test_commands/test_pre_collectstatic.py
|
ezrajrice/tethys
|
238271ebb09913f1f57b0d127fd5c81bb4780a0a
|
[
"BSD-2-Clause"
] | 79
|
2015-10-05T13:13:28.000Z
|
2022-02-01T12:30:33.000Z
|
tests/unit_tests/test_tethys_apps/test_management/test_commands/test_pre_collectstatic.py
|
ezrajrice/tethys
|
238271ebb09913f1f57b0d127fd5c81bb4780a0a
|
[
"BSD-2-Clause"
] | 542
|
2015-08-12T22:11:32.000Z
|
2022-03-29T22:18:08.000Z
|
tests/unit_tests/test_tethys_apps/test_management/test_commands/test_pre_collectstatic.py
|
Aquaveo/tethys
|
15f67c3fb9458d3af2733542be5ea6391f33b222
|
[
"BSD-2-Clause"
] | 71
|
2016-01-16T01:03:41.000Z
|
2022-03-31T17:55:54.000Z
|
import unittest
from unittest import mock
from tethys_apps.management.commands import pre_collectstatic
class ManagementCommandsPreCollectStaticTests(unittest.TestCase):
def setUp(self):
    # No shared fixtures: each test patches its own dependencies.
    pass
def tearDown(self):
    # Nothing to clean up: mock.patch decorators undo themselves.
    pass
def test_add_arguments(self):
    """The command should register exactly one flag: -l/--link."""
    parser = mock.MagicMock()
    pre_collectstatic.Command().add_arguments(parser)
    registered = parser.add_argument.call_args_list
    self.assertEqual(1, len(registered))
    for flag in ('-l', '--link'):
        self.assertIn(flag, registered[0][0])
@mock.patch('tethys_apps.management.commands.pre_collectstatic.print')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.exit')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.settings')
def test_handle_no_static_root(self, mock_settings, mock_exit, mock_print):
    """Without STATIC_ROOT configured, handle() warns and exits."""
    mock_settings.STATIC_ROOT = None
    # The mocked exit raises SystemExit so the command stops where the
    # real implementation would terminate the process.
    mock_exit.side_effect = SystemExit

    self.assertRaises(SystemExit, pre_collectstatic.Command().handle)

    expected_warning = ('WARNING: Cannot find the STATIC_ROOT setting. Please provide the '
                        'path to the static directory using the STATIC_ROOT setting in the portal_config.yml '
                        'file and try again.')
    self.assertEqual(expected_warning, mock_print.call_args_list[0][0][0])
@mock.patch('tethys_apps.management.commands.pre_collectstatic.print')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.shutil.copytree')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.path.isdir')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.remove')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_extensions')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_apps')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.settings')
def test_handle__not_named_static_or_public(self, mock_settings, mock_get_apps, mock_get_extensions, mock_os_remove,
                                            mock_os_path_isdir, mock_shutil_copytree, mock_print):
    """handle() warns and skips packages that have neither a "static" nor
    a "public" directory, copying nothing."""
    options = {'link': False}  # Don't create symbolic link (copy instead)
    static_root_dir = '/foo/static/root'
    app_source_dir = '/foo/sources/foo_app'
    ext_source_dir = '/foo/sources/foo_ext'
    app_public_dir = app_source_dir + '/public'
    ext_public_dir = ext_source_dir + '/public'
    app_static_dir = app_source_dir + '/static'
    ext_static_dir = ext_source_dir + '/static'
    mock_settings.STATIC_ROOT = static_root_dir
    mock_get_apps.return_value = {'foo_app': app_source_dir}
    mock_get_extensions.return_value = {'foo_ext': ext_source_dir}
    mock_os_remove.return_value = True
    # Neither "public" nor "static" exists for the app or the extension.
    mock_os_path_isdir.side_effect = (False, False, False, False)

    cmd = pre_collectstatic.Command()
    cmd.handle(**options)

    # Apps and extensions were gathered.
    mock_get_apps.assert_called_once()
    mock_get_extensions.assert_called_once()

    # Both candidate directories were checked for both packages.
    for candidate in (app_public_dir, ext_public_dir, app_static_dir, ext_static_dir):
        mock_os_path_isdir.assert_any_call(candidate)

    # Nothing was removed or copied since nothing was found.
    mock_os_remove.assert_not_called()
    mock_shutil_copytree.assert_not_called()

    # Verify the emitted messages.
    printed = [call[0][0] for call in mock_print.call_args_list]
    msg = 'INFO: Collecting static and public directories of apps and extensions to "{0}".' \
        .format(mock_settings.STATIC_ROOT)
    msg_info_first = 'WARNING: Cannot find a directory named "static" or "public" for app "foo_app". Skipping...'
    msg_info_second = 'WARNING: Cannot find a directory named "static" or "public" for app "foo_ext". Skipping...'
    self.assertIn(msg, printed)
    self.assertIn(msg_info_first, printed)
    self.assertIn(msg_info_second, printed)

    unexpected = (
        'WARNING: Cannot find the STATIC_ROOT setting',
        'Please provide the path to the static directory',
        'INFO: Successfully copied static directory to STATIC_ROOT for app "foo_app".',
        'INFO: Successfully copied static directory to STATIC_ROOT for app "foo_ext".',
    )
    for message in printed:
        for not_expected in unexpected:
            self.assertNotEqual(not_expected, message)
@mock.patch('tethys_apps.management.commands.pre_collectstatic.print')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.shutil.copytree')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.path.isdir')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.shutil.rmtree')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.remove')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_extensions')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_apps')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.settings')
def test_handle__public__remove_fail__rmtree_fail(self, mock_settings, mock_get_apps, mock_get_extensions,
                                                  mock_os_remove, mock_shutil_rmtree, mock_os_path_isdir,
                                                  mock_shutil_copytree, mock_print):
    """A "public" directory is still copied even when removing the old
    link/dir fails via both os.remove and shutil.rmtree."""
    options = {'link': False}  # Don't create symbolic link (copy instead)
    static_root_dir = '/foo/static/root'
    app_source_dir = '/foo/sources/foo_app'
    ext_source_dir = '/foo/sources/foo_ext'
    app_public_dir = app_source_dir + '/public'
    ext_public_dir = ext_source_dir + '/public'
    app_static_root_dir = static_root_dir + '/foo_app'
    ext_static_root_dir = static_root_dir + '/foo_ext'
    mock_settings.STATIC_ROOT = static_root_dir
    mock_get_apps.return_value = {'foo_app': app_source_dir}
    mock_get_extensions.return_value = {'foo_ext': ext_source_dir}
    mock_os_remove.side_effect = OSError      # removing old link fails
    mock_shutil_rmtree.side_effect = OSError  # removing old dir fails too
    mock_os_path_isdir.side_effect = (True, True)  # "public" dir found

    cmd = pre_collectstatic.Command()
    cmd.handle(**options)

    # Apps and extensions were gathered.
    mock_get_apps.assert_called_once()
    mock_get_extensions.assert_called_once()

    # The "public" directory was checked for both packages.
    mock_os_path_isdir.assert_any_call(app_public_dir)
    mock_os_path_isdir.assert_any_call(ext_public_dir)

    # Both removal strategies were attempted for both packages.
    for stale_target in (app_static_root_dir, ext_static_root_dir):
        mock_os_remove.assert_any_call(stale_target)
        mock_shutil_rmtree.assert_any_call(stale_target)

    # The public dirs were still copied into STATIC_ROOT.
    mock_shutil_copytree.assert_any_call(app_public_dir, app_static_root_dir)
    mock_shutil_copytree.assert_any_call(ext_public_dir, ext_static_root_dir)

    # Verify the emitted messages.
    printed = [call[0][0] for call in mock_print.call_args_list]
    msg = 'INFO: Collecting static and public directories of apps and extensions to "{0}".' \
        .format(mock_settings.STATIC_ROOT)
    msg_info_first = 'INFO: Successfully copied public directory to STATIC_ROOT for app "foo_app".'
    msg_info_second = 'INFO: Successfully copied public directory to STATIC_ROOT for app "foo_ext".'
    self.assertIn(msg, printed)
    self.assertIn(msg_info_first, printed)
    self.assertIn(msg_info_second, printed)

    unexpected = (
        'WARNING: Cannot find the STATIC_ROOT setting',
        'Please provide the path to the static directory',
        'INFO: Successfully linked static directory to STATIC_ROOT for app "foo_app".',
        'INFO: Successfully linked static directory to STATIC_ROOT for app "foo_ext".',
    )
    for message in printed:
        for not_expected in unexpected:
            self.assertNotEqual(not_expected, message)
@mock.patch('tethys_apps.management.commands.pre_collectstatic.print')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.shutil.copytree')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.path.isdir')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.remove')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_extensions')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_apps')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.settings')
def test_handle__named_public__copy(self, mock_settings, mock_get_apps, mock_get_extensions, mock_os_remove,
                                    mock_os_path_isdir, mock_shutil_copytree, mock_print):
    """When a "public" directory exists and --link is not given, the
    directory is copied into STATIC_ROOT."""
    options = {'link': False}  # Don't create symbolic link (copy instead)
    static_root_dir = '/foo/static/root'
    app_source_dir = '/foo/sources/foo_app'
    app_public_dir = app_source_dir + '/public'
    ext_source_dir = '/foo/sources/foo_ext'
    ext_public_dir = ext_source_dir + '/public'
    app_static_root_dir = static_root_dir + '/foo_app'
    ext_static_root_dir = static_root_dir + '/foo_ext'
    mock_settings.STATIC_ROOT = static_root_dir
    mock_get_apps.return_value = {'foo_app': app_source_dir}
    mock_get_extensions.return_value = {'foo_ext': ext_source_dir}
    mock_os_remove.return_value = True  # Old link/dir removed cleanly
    mock_os_path_isdir.side_effect = (True, True)  # "public" dir exists

    cmd = pre_collectstatic.Command()
    cmd.handle(**options)

    # Apps and extensions were gathered.
    mock_get_apps.assert_called_once()
    mock_get_extensions.assert_called_once()

    # The "public" directory was checked for both packages.
    mock_os_path_isdir.assert_any_call(app_public_dir)
    mock_os_path_isdir.assert_any_call(ext_public_dir)

    # Stale targets were removed before copying.
    mock_os_remove.assert_any_call(app_static_root_dir)
    mock_os_remove.assert_any_call(ext_static_root_dir)

    # The public dirs were copied into STATIC_ROOT.
    mock_shutil_copytree.assert_any_call(app_public_dir, app_static_root_dir)
    mock_shutil_copytree.assert_any_call(ext_public_dir, ext_static_root_dir)

    # Verify the emitted messages.
    printed = [call[0][0] for call in mock_print.call_args_list]
    msg = 'INFO: Collecting static and public directories of apps and extensions to "{0}".' \
        .format(mock_settings.STATIC_ROOT)
    msg_info_first = 'INFO: Successfully copied public directory to STATIC_ROOT for app "foo_app".'
    msg_info_second = 'INFO: Successfully copied public directory to STATIC_ROOT for app "foo_ext".'
    self.assertIn(msg, printed)
    self.assertIn(msg_info_first, printed)
    self.assertIn(msg_info_second, printed)

    unexpected = (
        'WARNING: Cannot find the STATIC_ROOT setting',
        'Please provide the path to the static directory',
        'INFO: Successfully linked static directory to STATIC_ROOT for app "foo_app".',
        'INFO: Successfully linked static directory to STATIC_ROOT for app "foo_ext".',
    )
    for message in printed:
        for not_expected in unexpected:
            self.assertNotEqual(not_expected, message)
@mock.patch('tethys_apps.management.commands.pre_collectstatic.print')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.symlink')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.path.isdir')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.os.remove')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_extensions')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.get_installed_tethys_apps')
@mock.patch('tethys_apps.management.commands.pre_collectstatic.settings')
def test_handle__named_static__link(self, mock_settings, mock_get_apps, mock_get_extensions, mock_os_remove,
                                    mock_os_path_isdir, mock_os_symlink, mock_print):
    """With --link and no "public" dir, handle() should symlink each app's
    and extension's "static" dir into STATIC_ROOT (not copy it)."""
    options = {'link': True}  # Create symbolic link (instead of copy)
    static_root = '/foo/static/root'
    app_src = '/foo/sources/foo_app'
    ext_src = '/foo/sources/foo_ext'
    app_target = static_root + '/foo_app'
    ext_target = static_root + '/foo_ext'
    mock_settings.STATIC_ROOT = static_root
    mock_get_apps.return_value = {'foo_app': app_src}
    mock_get_extensions.return_value = {'foo_ext': ext_src}
    mock_os_remove.return_value = True  # old link/dir removed successfully
    # Per project: "public" path doesn't exist, "static" path does.
    mock_os_path_isdir.side_effect = (False, True, False, True)

    pre_collectstatic.Command().handle(**options)

    # Apps and extensions were gathered exactly once each.
    mock_get_apps.assert_called_once()
    mock_get_extensions.assert_called_once()
    # Both "public" and "static" dirs were probed for app and extension.
    for src in (app_src, ext_src):
        mock_os_path_isdir.assert_any_call(src + '/public')
        mock_os_path_isdir.assert_any_call(src + '/static')
    # Old dirs/links were removed, then symlinks created to the static dirs.
    mock_os_remove.assert_any_call(app_target)
    mock_os_remove.assert_any_call(ext_target)
    mock_os_symlink.assert_any_call(app_src + '/static', app_target)
    mock_os_symlink.assert_any_call(ext_src + '/static', ext_target)

    # Collect the first positional arg of every print() call.
    printed = [call[0][0] for call in mock_print.call_args_list]
    expected_msgs = (
        'INFO: Collecting static and public directories of apps and extensions to "{0}".'
        .format(mock_settings.STATIC_ROOT),
        'INFO: Successfully linked public directory to STATIC_ROOT for app "foo_app".',
        'INFO: Successfully linked public directory to STATIC_ROOT for app "foo_ext".',
    )
    for expected in expected_msgs:
        self.assertIn(expected, printed)
    unexpected_msgs = (
        'WARNING: Cannot find the STATIC_ROOT setting',
        'Please provide the path to the static directory',
        'INFO: Successfully copied static directory to STATIC_ROOT for app "foo_app".',
        'INFO: Successfully copied static directory to STATIC_ROOT for app "foo_ext".',
    )
    for unexpected in unexpected_msgs:
        for line in printed:
            self.assertNotEqual(unexpected, line)
| 50.706395
| 120
| 0.712205
| 2,405
| 17,443
| 4.796674
| 0.06736
| 0.062413
| 0.038315
| 0.080097
| 0.899879
| 0.894244
| 0.886876
| 0.881501
| 0.872053
| 0.861737
| 0
| 0.003735
| 0.2018
| 17,443
| 343
| 121
| 50.854227
| 0.824822
| 0.086453
| 0
| 0.813008
| 0
| 0.00813
| 0.28788
| 0.128876
| 0
| 0
| 0
| 0
| 0.280488
| 1
| 0.03252
| false
| 0.00813
| 0.012195
| 0
| 0.04878
| 0.178862
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7870024f686a1ddeb890965f46e2b9e3371f502
| 30,940
|
py
|
Python
|
awp5/api/job.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | 2
|
2019-04-10T16:46:19.000Z
|
2020-08-18T21:57:59.000Z
|
awp5/api/job.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
awp5/api/job.py
|
ThomasWaldinger/py_awp5
|
10077ab81eab506bea58a67242c2d550988ec18c
|
[
"Apache-2.0"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Thomas Waldinger. All rights reserved.
# Licensed under the Apache License, Version 2.0. See
# License.txt in the project root for license
# information.
# ---------------
"""
Job
The Job resource tracks jobs submitted to the P5 server. Information about each
of the submitted jobs is held indefinitely and can be queried by the user at
any time. Job resources are generated automatically, for instance by the submit
methods of the ArchiveSelection resource.
"""
from awp5.base.connection import P5Resource, exec_nsdchat
from awp5.base.helpers import resourcelist, onereturnvalue
module_name = "Job"
def names(as_object=False, p5_connection=None):
    """Syntax: Job names

    Return the names of all currently scheduled or running jobs, or the
    string "<empty>" if none are scheduled. With as_object=True the names
    are wrapped into Job objects.
    """
    raw = exec_nsdchat([module_name, "names"], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
def completed(lastdays=None, as_object=False, p5_connection=None):
    """Syntax: Job completed [<lastdays>]

    Return the names of jobs completed today, or over the last <lastdays>
    days when given (positive integer, default 0 meaning today). Returns
    the string "<empty>" if no jobs completed in the given time.
    """
    raw = exec_nsdchat([module_name, "completed", lastdays], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
@onereturnvalue
def completion(job_name, p5_connection=None):
    """Syntax: Job <name> completion

    Return the completion code of the finished job: "success" (the whole
    job ran — though individual files may still have failed; see the
    protocol method), "exception" (parts failed while parallel threads
    finished), or "failure" (nothing was processed correctly).
    """
    return exec_nsdchat([module_name, job_name, "completion"], p5_connection)
@onereturnvalue
def describe(job_name, p5_connection=None):
    """Syntax: Job <name> describe

    Return the human-readable job description as shown in the P5 job
    monitor.
    """
    return exec_nsdchat([module_name, job_name, "describe"], p5_connection)
def failed(lastdays=None, as_object=False, p5_connection=None):
    """Syntax: Job failed [<lastdays>]

    Return the names of jobs that failed today, or over the last
    <lastdays> days when given (positive integer, 0 means today). Returns
    the string "<empty>" if no jobs failed.
    """
    raw = exec_nsdchat([module_name, "failed", lastdays], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
@onereturnvalue
def inventory(job_name, outputfile, options=None, p5_connection=None):
    """Syntax: Job <name> inventory <output file> [<options>]

    Write a TAB-separated listing of the files saved by the Archive job
    <name> into <output file>, given as [client:]absolute_path (the
    client part defaults to localhost). By default each record holds the
    index path of one saved file; <options> appends further per-file
    attributes — system attributes (ppath: physical path, volumes: blank
    separated volume list, size, handle: as required by the
    RestoreSelection, btime: backup time, mtime: modification time, ino:
    inode number) or user-defined meta-data fields.

    Index paths are informational and in general cannot be used to
    address files on the file system; use the ppath attribute for the
    physical path as-found on the client (value "<empty>" when a file has
    no corresponding physical path).

    Returns the <client>:<output file> on success.
    """
    return exec_nsdchat([module_name, job_name, "inventory", outputfile,
                         options], p5_connection)
@onereturnvalue
def label(job_name, p5_connection=None):
    """Syntax: Job <name> label

    Return the human-readable job label — one of Archive, Backup,
    Synchronize or System — e.g. for display alongside Job describe.
    """
    return exec_nsdchat([module_name, job_name, "label"], p5_connection)
def pending(as_object=False, p5_connection=None):
    """Syntax: Job pending

    Return the names of jobs waiting to be executed — queued jobs not yet
    scheduled and scheduled jobs waiting for a free worker thread.
    Returns the string "<empty>" if no jobs are waiting.
    """
    raw = exec_nsdchat([module_name, "pending"], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
@onereturnvalue
def protocol(job_name, archiveentry=None, p5_connection=None):
    """Syntax: Job <name> protocol [<archiveentry>]

    Return the human readable completion protocol of the completed job,
    or of a single archived/restored file when archiveentry is given.
    """
    return exec_nsdchat([module_name, job_name, "protocol", archiveentry],
                        p5_connection)
@onereturnvalue
def report(job_name, p5_connection=None):
    """Syntax: Job <name> report

    Return a human readable report of the currently running job.
    """
    return exec_nsdchat([module_name, job_name, "report"], p5_connection)
@onereturnvalue
def resourcegroup(job_name, p5_connection=None):
    """Syntax: Job <name> resourcegroup

    Return the name of the resource group this job ran for (for example
    ArchivePlan, SyncPlan, etc.), or the string "<empty>" if no resource
    group is associated with the job.
    """
    return exec_nsdchat([module_name, job_name, "resourcegroup"], p5_connection)
@onereturnvalue
def resourcename(job_name, p5_connection=None):
    """Syntax: Job <name> resourcename

    Return the name of the resource this job ran for (for example
    Default-Backup, Default-Archive), or the string "<empty>" if no
    resource is associated with the job.
    """
    return exec_nsdchat([module_name, job_name, "resourcename"], p5_connection)
def running(as_object=False, p5_connection=None):
    """Syntax: Job running

    Return the names of all currently running jobs, or the string
    "<empty>" if no jobs are running.
    """
    raw = exec_nsdchat([module_name, "running"], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
@onereturnvalue
def status(job_name, p5_connection=None):
    """Syntax: Job <name> status

    Return the job's status, one of: started / stopped (intermediate
    states), unknown, scheduled (queued, waiting to run), pending
    (waiting to be accepted by the queue manager), running, canceled (by
    user), completed, or terminated (by a server shutdown).
    """
    return exec_nsdchat([module_name, job_name, "status"], p5_connection)
def warning(lastdays=None, as_object=False, p5_connection=None):
    """Syntax: Job warning [<lastdays>]

    Return the names of jobs with warnings from today, or over the last
    <lastdays> days when given (positive integer, 0 = today). Returns the
    string "<empty>" if no jobs ended with a warning.
    """
    raw = exec_nsdchat([module_name, "warning", lastdays], p5_connection)
    if as_object:
        return resourcelist(raw, Job, p5_connection)
    return raw
@onereturnvalue
def xmlticket(job_name, outputfile=None, p5_connection=None):
    """Syntax: Job <name> xmlticket [<outfilename>]

    Return the completion protocol of the completed job — human readable
    text embedded in generic XML sections. When outputfile is given, the
    command output is rerouted to that file.
    """
    return exec_nsdchat([module_name, job_name, "xmlticket", outputfile],
                        p5_connection)
@onereturnvalue
def cancel(job_name, p5_connection=None):
    """Syntax: Job <name> cancel

    Cancel the running job; only jobs in the running status can be
    canceled (any other status results in an error). Returns the string
    "1" if the job is canceled, "0" if it could not be canceled.
    """
    return exec_nsdchat([module_name, job_name, "cancel"], p5_connection)
@onereturnvalue
def runat(job_name, p5_connection=None):
    """Syntax: Job <name> runat

    Return the time (Posix seconds) the job was scheduled to run.
    """
    return exec_nsdchat([module_name, job_name, "runat"], p5_connection)
@onereturnvalue
def stop(job_name, p5_connection=None):
    """Syntax: Job <name> stop

    Stop the scheduled job; only jobs in the scheduled status can be
    stopped (any other status results in an error). Returns the string
    "1" if the job is stopped, "0" if it could not be stopped.
    """
    return exec_nsdchat([module_name, job_name, "stop"], p5_connection)
class Job(P5Resource):
    """Object wrapper around the P5 "Job" CLI resource.

    Instance methods operate on the job this object names via the bound
    connection. The list queries (names, completed, failed, pending,
    running, warning) are kept as plain functions inside the class body —
    as in the module-level API — and take an explicit p5_connection;
    their as_object default is True so they return Job objects.

    Fix applied: cancel() referenced the undefined name ``job_name``
    (a NameError at call time); it now uses ``self.name``.
    """

    def __init__(self, job_name, p5_connection=None):
        super().__init__(job_name, p5_connection)

    def names(as_object=True, p5_connection=None):
        """Return all currently scheduled or running jobs, or the string
        "<empty>" if none are scheduled."""
        raw = exec_nsdchat([module_name, "names"], p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    def completed(lastdays=None, as_object=True, p5_connection=None):
        """Return jobs completed today, or over the last <lastdays> days
        when given (positive integer, default 0 = today); "<empty>" if no
        jobs completed in the given time."""
        raw = exec_nsdchat([module_name, "completed", lastdays],
                           p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    @onereturnvalue
    def completion(self):
        """Return the completion code of the finished job: "success" (the
        whole job ran — individual files may still have failed; see
        protocol), "exception" (parts failed while parallel threads
        finished), or "failure" (nothing was processed correctly)."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "completion"])

    @onereturnvalue
    def describe(self):
        """Return the human-readable job description as shown in the P5
        job monitor."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "describe"])

    def failed(lastdays=None, as_object=True, p5_connection=None):
        """Return jobs that failed today, or over the last <lastdays> days
        when given (positive integer, 0 means today); "<empty>" if no
        jobs failed."""
        raw = exec_nsdchat([module_name, "failed", lastdays],
                           p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    @onereturnvalue
    def inventory(self, outputfile, options=None):
        """Write a TAB-separated listing of the files saved by this
        Archive job into outputfile ([client:]absolute_path; client
        defaults to localhost). By default each record holds a file's
        index path; options appends further per-file attributes — system
        attributes (ppath, volumes, size, handle, btime, mtime, ino) or
        user-defined meta-data fields. Index paths are informational and
        in general cannot be used to address files on the file system;
        use ppath for the physical path ("<empty>" when none exists).
        Returns the <client>:<output file> on success."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "inventory", outputfile,
                                                options])

    @onereturnvalue
    def label(self):
        """Return the job label: Archive, Backup, Synchronize or System;
        useful in conjunction with describe() for list displays."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "label"])

    def pending(as_object=True, p5_connection=None):
        """Return jobs waiting to be executed — queued jobs not yet
        scheduled and scheduled jobs waiting for a free worker thread;
        "<empty>" if none are waiting."""
        raw = exec_nsdchat([module_name, "pending"], p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    @onereturnvalue
    def protocol(self, archiveentry=None):
        """Return the human readable completion protocol of the job, or
        of a single archived/restored file when archiveentry is given."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "protocol", archiveentry])

    @onereturnvalue
    def report(self):
        """Return a human readable report of the currently running job."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "report"])

    @onereturnvalue
    def resourcegroup(self):
        """Return the resource group this job ran for (e.g. ArchivePlan,
        SyncPlan), or "<empty>" if none is associated with the job."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "resourcegroup"])

    @onereturnvalue
    def resourcename(self):
        """Return the resource this job ran for (e.g. Default-Backup,
        Default-Archive), or "<empty>" if none is associated."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "resourcename"])

    def running(as_object=True, p5_connection=None):
        """Return the names of all currently running jobs; "<empty>" if
        none are running."""
        raw = exec_nsdchat([module_name, "running"], p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    @onereturnvalue
    def status(self):
        """Return the job's status, one of: started / stopped
        (intermediate states), unknown, scheduled, pending, running,
        canceled, completed, or terminated (by a server shutdown)."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "status"])

    def warning(lastdays=None, as_object=True, p5_connection=None):
        """Return jobs with warnings from today, or over the last
        <lastdays> days when given (positive integer, 0 = today);
        "<empty>" if no jobs ended with a warning."""
        raw = exec_nsdchat([module_name, "warning", lastdays],
                           p5_connection)
        if as_object:
            return resourcelist(raw, Job, p5_connection)
        return raw

    @onereturnvalue
    def xmlticket(self, outputfile=None):
        """Return the completion protocol embedded in generic XML
        sections, rerouted to outputfile when given."""
        # nsdchat expects an empty string, not None, for the optional arg.
        outputfile_option = outputfile if outputfile else ""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "xmlticket",
                                                outputfile_option])

    @onereturnvalue
    def cancel(self):
        """Cancel the running job; only running jobs can be canceled.
        Returns "1" if canceled, "0" if it could not be canceled."""
        # Bug fix: original used the undefined name `job_name` here,
        # which raised NameError whenever cancel() was invoked.
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "cancel"])

    @onereturnvalue
    def runat(self):
        """Return the time (Posix seconds) the job was scheduled to run."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "runat"])

    @onereturnvalue
    def stop(self):
        """Stop the scheduled job; only scheduled jobs can be stopped.
        Returns "1" if stopped, "0" if it could not be stopped."""
        return self.p5_connection.nsdchat_call([module_name, self.name,
                                                "stop"])

    def __repr__(self):
        return ": ".join([module_name, self.name])
| 40.07772
| 79
| 0.639237
| 3,948
| 30,940
| 4.938703
| 0.091439
| 0.047389
| 0.027285
| 0.040927
| 0.943379
| 0.940609
| 0.938404
| 0.935942
| 0.918402
| 0.916453
| 0
| 0.00471
| 0.300065
| 30,940
| 771
| 80
| 40.129702
| 0.894533
| 0.5968
| 0
| 0.730942
| 0
| 0
| 0.030067
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.179372
| false
| 0
| 0.008969
| 0.004484
| 0.421525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7bcb664bcc58934dd16f4662e4af550f74af2c8
| 71,464
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_tcp_cfg.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_tcp_cfg.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ip_tcp_cfg.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-07-22T04:04:44.000Z
|
2020-07-22T04:04:44.000Z
|
""" Cisco_IOS_XR_ip_tcp_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ip\-tcp package configuration.
This module contains definitions
for the following management objects\:
ip\-tcp\: Global IP TCP configuration
ip\: ip
Copyright (c) 2013\-2017 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class IpTcp(Entity):
    """
    Global IP TCP configuration
    .. attribute:: directory
    TCP directory details
    **type**\: :py:class:`Directory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.IpTcp.Directory>`
    **presence node**\: True
    .. attribute:: throttle
    Throttle TCP receive buffer (in percentage)
    **type**\: :py:class:`Throttle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.IpTcp.Throttle>`
    **presence node**\: True
    .. attribute:: num_thread
    TCP InQueue and OutQueue threads
    **type**\: :py:class:`NumThread <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.IpTcp.NumThread>`
    **presence node**\: True
    .. attribute:: accept_rate
    TCP connection accept rate
    **type**\: int
    **range:** 1..1000
    **default value**\: 500
    .. attribute:: selective_ack
    Enable TCP selective\-ACK
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: window_size
    TCP receive window size (bytes)
    **type**\: int
    **range:** 2048..65535
    **units**\: byte
    .. attribute:: receive_q
    TCP receive Queue Size
    **type**\: int
    **range:** 40..800
    .. attribute:: maximum_segment_size
    TCP initial maximum segment size
    **type**\: int
    **range:** 68..10000
    .. attribute:: syn_wait_time
    Time to wait on new TCP connections in seconds
    **type**\: int
    **range:** 5..30
    **units**\: second
    .. attribute:: timestamp
    Enable TCP timestamp option
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: path_mtu_discovery
    Aging time; 0 for infinite, and range be (10,30)
    **type**\: int
    **range:** \-2147483648..2147483647
    **units**\: minute
    **default value**\: 10
    """

    # YANG module metadata used by the YDK runtime to resolve this entity.
    _prefix = 'ip-tcp-cfg'
    _revision = '2016-02-26'

    def __init__(self):
        super(IpTcp, self).__init__()
        self._top_entity = None

        self.yang_name = "ip-tcp"
        self.yang_parent_name = "Cisco-IOS-XR-ip-tcp-cfg"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, class).
        self._child_container_classes = OrderedDict([("directory", ("directory", IpTcp.Directory)), ("throttle", ("throttle", IpTcp.Throttle)), ("num-thread", ("num_thread", IpTcp.NumThread))])
        self._child_list_classes = OrderedDict([])
        # Leaf metadata: python attribute name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('accept_rate', YLeaf(YType.uint32, 'accept-rate')),
            ('selective_ack', YLeaf(YType.empty, 'selective-ack')),
            ('window_size', YLeaf(YType.uint32, 'window-size')),
            ('receive_q', YLeaf(YType.uint32, 'receive-q')),
            ('maximum_segment_size', YLeaf(YType.uint32, 'maximum-segment-size')),
            ('syn_wait_time', YLeaf(YType.uint32, 'syn-wait-time')),
            ('timestamp', YLeaf(YType.empty, 'timestamp')),
            ('path_mtu_discovery', YLeaf(YType.int32, 'path-mtu-discovery')),
        ])
        # Leaves start unset (None) until assigned by the user or decoded from a payload.
        self.accept_rate = None
        self.selective_ack = None
        self.window_size = None
        self.receive_q = None
        self.maximum_segment_size = None
        self.syn_wait_time = None
        self.timestamp = None
        self.path_mtu_discovery = None
        # Presence containers default to None; the user must instantiate them explicitly.
        self.directory = None
        self._children_name_map["directory"] = "directory"
        self._children_yang_names.add("directory")
        self.throttle = None
        self._children_name_map["throttle"] = "throttle"
        self._children_yang_names.add("throttle")
        self.num_thread = None
        self._children_name_map["num_thread"] = "num-thread"
        self._children_yang_names.add("num-thread")
        self._segment_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip-tcp"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK so leaf assignments are tracked and validated.
        self._perform_setattr(IpTcp, ['accept_rate', 'selective_ack', 'window_size', 'receive_q', 'maximum_segment_size', 'syn_wait_time', 'timestamp', 'path_mtu_discovery'], name, value)

    class Directory(Entity):
        """
        TCP directory details
        .. attribute:: directoryname
        Directory name
        **type**\: str
        **mandatory**\: True
        .. attribute:: max_debug_files
        Set number of Debug files
        **type**\: int
        **range:** 1..10000
        **default value**\: 256
        .. attribute:: max_file_size_files
        Set size of debug files in bytes
        **type**\: int
        **range:** 1024..4294967295
        **units**\: byte
        This class is a :ref:`presence class<presence-class>`
        """

        _prefix = 'ip-tcp-cfg'
        _revision = '2016-02-26'

        def __init__(self):
            super(IpTcp.Directory, self).__init__()

            self.yang_name = "directory"
            self.yang_parent_name = "ip-tcp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Presence container: its mere existence in a payload carries meaning.
            self.is_presence_container = True
            self._leafs = OrderedDict([
                ('directoryname', YLeaf(YType.str, 'directoryname')),
                ('max_debug_files', YLeaf(YType.uint32, 'max-debug-files')),
                ('max_file_size_files', YLeaf(YType.uint32, 'max-file-size-files')),
            ])
            self.directoryname = None
            self.max_debug_files = None
            self.max_file_size_files = None
            self._segment_path = lambda: "directory"
            self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip-tcp/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(IpTcp.Directory, ['directoryname', 'max_debug_files', 'max_file_size_files'], name, value)

    class Throttle(Entity):
        """
        Throttle TCP receive buffer (in percentage)
        .. attribute:: tcpmin_throttle
        Min throttle
        **type**\: int
        **range:** 0..100
        **mandatory**\: True
        .. attribute:: tcpmaxthrottle
        Max throttle
        **type**\: int
        **range:** 0..100
        **mandatory**\: True
        This class is a :ref:`presence class<presence-class>`
        """

        _prefix = 'ip-tcp-cfg'
        _revision = '2016-02-26'

        def __init__(self):
            super(IpTcp.Throttle, self).__init__()

            self.yang_name = "throttle"
            self.yang_parent_name = "ip-tcp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Presence container: its mere existence in a payload carries meaning.
            self.is_presence_container = True
            self._leafs = OrderedDict([
                ('tcpmin_throttle', YLeaf(YType.uint32, 'tcpmin-throttle')),
                ('tcpmaxthrottle', YLeaf(YType.uint32, 'tcpmaxthrottle')),
            ])
            self.tcpmin_throttle = None
            self.tcpmaxthrottle = None
            self._segment_path = lambda: "throttle"
            self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip-tcp/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(IpTcp.Throttle, ['tcpmin_throttle', 'tcpmaxthrottle'], name, value)

    class NumThread(Entity):
        """
        TCP InQueue and OutQueue threads
        .. attribute:: tcp_in_q_threads
        InQ Threads
        **type**\: int
        **range:** 1..16
        **mandatory**\: True
        .. attribute:: tcp_out_q_threads
        OutQ Threads
        **type**\: int
        **range:** 1..16
        **mandatory**\: True
        This class is a :ref:`presence class<presence-class>`
        """

        _prefix = 'ip-tcp-cfg'
        _revision = '2016-02-26'

        def __init__(self):
            super(IpTcp.NumThread, self).__init__()

            self.yang_name = "num-thread"
            self.yang_parent_name = "ip-tcp"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            # Presence container: its mere existence in a payload carries meaning.
            self.is_presence_container = True
            self._leafs = OrderedDict([
                ('tcp_in_q_threads', YLeaf(YType.uint32, 'tcp-in-q-threads')),
                ('tcp_out_q_threads', YLeaf(YType.uint32, 'tcp-out-q-threads')),
            ])
            self.tcp_in_q_threads = None
            self.tcp_out_q_threads = None
            self._segment_path = lambda: "num-thread"
            self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip-tcp/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(IpTcp.NumThread, ['tcp_in_q_threads', 'tcp_out_q_threads'], name, value)

    def clone_ptr(self):
        # Return a fresh top-level entity; used by the YDK runtime when decoding replies.
        self._top_entity = IpTcp()
        return self._top_entity
class Ip(Entity):
"""
ip
.. attribute:: cinetd
Cinetd configuration data
**type**\: :py:class:`Cinetd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd>`
.. attribute:: forward_protocol
Controls forwarding of physical and directed IP broadcasts
**type**\: :py:class:`ForwardProtocol <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.ForwardProtocol>`
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
    super(Ip, self).__init__()
    self._top_entity = None

    self.yang_name = "ip"
    self.yang_parent_name = "Cisco-IOS-XR-ip-tcp-cfg"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # forward-protocol is augmented in from the Cisco-IOS-XR-ip-udp-cfg module,
    # hence its module-qualified YANG name.
    self._child_container_classes = OrderedDict([("cinetd", ("cinetd", Ip.Cinetd)), ("Cisco-IOS-XR-ip-udp-cfg:forward-protocol", ("forward_protocol", Ip.ForwardProtocol))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Non-presence child containers are instantiated eagerly and parented here.
    self.cinetd = Ip.Cinetd()
    self.cinetd.parent = self
    self._children_name_map["cinetd"] = "cinetd"
    self._children_yang_names.add("cinetd")
    self.forward_protocol = Ip.ForwardProtocol()
    self.forward_protocol.parent = self
    self._children_name_map["forward_protocol"] = "Cisco-IOS-XR-ip-udp-cfg:forward-protocol"
    self._children_yang_names.add("Cisco-IOS-XR-ip-udp-cfg:forward-protocol")
    self._segment_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip"
class Cinetd(Entity):
"""
Cinetd configuration data
.. attribute:: services
Describing services of cinetd
**type**\: :py:class:`Services <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services>`
.. attribute:: rate_limit
Number of service requests accepted per second
**type**\: int
**range:** 1..100
**default value**\: 1
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
    super(Ip.Cinetd, self).__init__()

    self.yang_name = "cinetd"
    self.yang_parent_name = "ip"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("services", ("services", Ip.Cinetd.Services))])
    self._child_list_classes = OrderedDict([])
    # rate-limit is augmented in from the Cisco-IOS-XR-ipv4-cinetd-cfg module,
    # hence its module-qualified leaf name.
    self._leafs = OrderedDict([
        ('rate_limit', YLeaf(YType.uint32, 'Cisco-IOS-XR-ipv4-cinetd-cfg:rate-limit')),
    ])
    self.rate_limit = None

    self.services = Ip.Cinetd.Services()
    self.services.parent = self
    self._children_name_map["services"] = "services"
    self._children_yang_names.add("services")
    self._segment_path = lambda: "cinetd"
    self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/%s" % self._segment_path()
def __setattr__(self, name, value):
    # Route writes through YDK so assignments to 'rate_limit' are tracked and validated.
    self._perform_setattr(Ip.Cinetd, ['rate_limit'], name, value)
class Services(Entity):
"""
Describing services of cinetd
.. attribute:: ipv4
IPV4 related services
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4>`
.. attribute:: vrfs
VRF table
**type**\: :py:class:`Vrfs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs>`
.. attribute:: ipv6
IPV6 related services
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6>`
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
    super(Ip.Cinetd.Services, self).__init__()

    self.yang_name = "services"
    self.yang_parent_name = "cinetd"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute name, class).
    self._child_container_classes = OrderedDict([("ipv4", ("ipv4", Ip.Cinetd.Services.Ipv4)), ("vrfs", ("vrfs", Ip.Cinetd.Services.Vrfs)), ("ipv6", ("ipv6", Ip.Cinetd.Services.Ipv6))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Non-presence child containers are instantiated eagerly and parented here.
    self.ipv4 = Ip.Cinetd.Services.Ipv4()
    self.ipv4.parent = self
    self._children_name_map["ipv4"] = "ipv4"
    self._children_yang_names.add("ipv4")
    self.vrfs = Ip.Cinetd.Services.Vrfs()
    self.vrfs.parent = self
    self._children_name_map["vrfs"] = "vrfs"
    self._children_yang_names.add("vrfs")
    self.ipv6 = Ip.Cinetd.Services.Ipv6()
    self.ipv6.parent = self
    self._children_name_map["ipv6"] = "ipv6"
    self._children_yang_names.add("ipv6")
    self._segment_path = lambda: "services"
    self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/%s" % self._segment_path()
class Ipv4(Entity):
    """
    IPV4 related services
    .. attribute:: small_servers
    Describing IPV4 and IPV6 small servers
    **type**\: :py:class:`SmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4.SmallServers>`
    """

    _prefix = 'ip-tcp-cfg'
    _revision = '2016-02-26'

    def __init__(self):
        super(Ip.Cinetd.Services.Ipv4, self).__init__()

        self.yang_name = "ipv4"
        self.yang_parent_name = "services"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([("small-servers", ("small_servers", Ip.Cinetd.Services.Ipv4.SmallServers))])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict()
        self.small_servers = Ip.Cinetd.Services.Ipv4.SmallServers()
        self.small_servers.parent = self
        self._children_name_map["small_servers"] = "small-servers"
        self._children_yang_names.add("small-servers")
        self._segment_path = lambda: "ipv4"
        self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/%s" % self._segment_path()

    class SmallServers(Entity):
        """
        Describing IPV4 and IPV6 small servers
        .. attribute:: tcp_small_servers
        Describing TCP related IPV4 and IPV6 small servers
        **type**\: :py:class:`TcpSmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4.SmallServers.TcpSmallServers>`
        **presence node**\: True
        .. attribute:: udp_small_servers
        UDP small servers configuration
        **type**\: :py:class:`UdpSmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4.SmallServers.UdpSmallServers>`
        **presence node**\: True
        """

        _prefix = 'ip-tcp-cfg'
        _revision = '2016-02-26'

        def __init__(self):
            super(Ip.Cinetd.Services.Ipv4.SmallServers, self).__init__()

            self.yang_name = "small-servers"
            self.yang_parent_name = "ipv4"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # udp-small-servers is augmented in from Cisco-IOS-XR-ip-udp-cfg,
            # hence its module-qualified YANG name.
            self._child_container_classes = OrderedDict([("tcp-small-servers", ("tcp_small_servers", Ip.Cinetd.Services.Ipv4.SmallServers.TcpSmallServers)), ("Cisco-IOS-XR-ip-udp-cfg:udp-small-servers", ("udp_small_servers", Ip.Cinetd.Services.Ipv4.SmallServers.UdpSmallServers))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict()
            # Presence containers default to None; the user must instantiate them explicitly.
            self.tcp_small_servers = None
            self._children_name_map["tcp_small_servers"] = "tcp-small-servers"
            self._children_yang_names.add("tcp-small-servers")
            self.udp_small_servers = None
            self._children_name_map["udp_small_servers"] = "Cisco-IOS-XR-ip-udp-cfg:udp-small-servers"
            self._children_yang_names.add("Cisco-IOS-XR-ip-udp-cfg:udp-small-servers")
            self._segment_path = lambda: "small-servers"
            self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv4/%s" % self._segment_path()

        class TcpSmallServers(Entity):
            """
            Describing TCP related IPV4 and IPV6 small
            servers
            .. attribute:: access_control_list_name
            Specify the access list
            **type**\: str
            .. attribute:: small_server
            Set number of allowable TCP small servers, specify 0 for no\-limit
            **type**\: union of the below types:
            **type**\: :py:class:`SmallServer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4.SmallServers.TcpSmallServers.SmallServer>`
            **type**\: int
            **range:** 0..2147483647
            **mandatory**\: True
            This class is a :ref:`presence class<presence-class>`
            """

            _prefix = 'ip-tcp-cfg'
            _revision = '2016-02-26'

            def __init__(self):
                super(Ip.Cinetd.Services.Ipv4.SmallServers.TcpSmallServers, self).__init__()

                self.yang_name = "tcp-small-servers"
                self.yang_parent_name = "small-servers"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Presence container: its mere existence in a payload carries meaning.
                self.is_presence_container = True
                # small-server is a union (enum "no-limit" | uint32), carried as a string leaf.
                self._leafs = OrderedDict([
                    ('access_control_list_name', YLeaf(YType.str, 'access-control-list-name')),
                    ('small_server', YLeaf(YType.str, 'small-server')),
                ])
                self.access_control_list_name = None
                self.small_server = None
                self._segment_path = lambda: "tcp-small-servers"
                self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv4/small-servers/%s" % self._segment_path()

            def __setattr__(self, name, value):
                self._perform_setattr(Ip.Cinetd.Services.Ipv4.SmallServers.TcpSmallServers, ['access_control_list_name', 'small_server'], name, value)

            class SmallServer(Enum):
                """
                SmallServer (Enum Class)
                Set number of allowable TCP small servers,
                specify 0 for no\-limit
                .. data:: no_limit = 0
                Unlimited Servers
                """

                no_limit = Enum.YLeaf(0, "no-limit")

        class UdpSmallServers(Entity):
            """
            UDP small servers configuration
            .. attribute:: access_control_list_name
            Specify the access list
            **type**\: str
            .. attribute:: small_server
            Set number of allowable small servers, specify 0 for no\-limit
            **type**\: union of the below types:
            **type**\: :py:class:`SmallServer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv4.SmallServers.UdpSmallServers.SmallServer>`
            **type**\: int
            **range:** 0..2147483647
            **mandatory**\: True
            This class is a :ref:`presence class<presence-class>`
            """

            # This node comes from the augmenting Cisco-IOS-XR-ip-udp-cfg module,
            # so its prefix/revision differ from the rest of the file.
            _prefix = 'ip-udp-cfg'
            _revision = '2017-07-31'

            def __init__(self):
                super(Ip.Cinetd.Services.Ipv4.SmallServers.UdpSmallServers, self).__init__()

                self.yang_name = "udp-small-servers"
                self.yang_parent_name = "small-servers"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                # Presence container: its mere existence in a payload carries meaning.
                self.is_presence_container = True
                # small-server is a union (enum "no-limit" | uint32), carried as a string leaf.
                self._leafs = OrderedDict([
                    ('access_control_list_name', YLeaf(YType.str, 'access-control-list-name')),
                    ('small_server', YLeaf(YType.str, 'small-server')),
                ])
                self.access_control_list_name = None
                self.small_server = None
                self._segment_path = lambda: "Cisco-IOS-XR-ip-udp-cfg:udp-small-servers"
                self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv4/small-servers/%s" % self._segment_path()

            def __setattr__(self, name, value):
                self._perform_setattr(Ip.Cinetd.Services.Ipv4.SmallServers.UdpSmallServers, ['access_control_list_name', 'small_server'], name, value)

            class SmallServer(Enum):
                """
                SmallServer (Enum Class)
                Set number of allowable small servers, specify
                0 for no\-limit
                .. data:: no_limit = 0
                Unlimited Servers
                """

                no_limit = Enum.YLeaf(0, "no-limit")
class Vrfs(Entity):
    """
    VRF table
    .. attribute:: vrf
    VRF specific data
    **type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf>`
    """

    _prefix = 'ip-tcp-cfg'
    _revision = '2016-02-26'

    def __init__(self):
        super(Ip.Cinetd.Services.Vrfs, self).__init__()

        self.yang_name = "vrfs"
        self.yang_parent_name = "services"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # YANG list children: YANG name -> (python attribute name, class).
        self._child_list_classes = OrderedDict([("vrf", ("vrf", Ip.Cinetd.Services.Vrfs.Vrf))])
        self._leafs = OrderedDict()
        # Keyed YANG list of per-VRF entries.
        self.vrf = YList(self)
        self._segment_path = lambda: "vrfs"
        self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/%s" % self._segment_path()

    def __setattr__(self, name, value):
        self._perform_setattr(Ip.Cinetd.Services.Vrfs, [], name, value)

    class Vrf(Entity):
        """
        VRF specific data
        .. attribute:: vrf_name (key)
        Name of the VRF instance
        **type**\: str
        **pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
        .. attribute:: ipv6
        IPV6 related services
        **type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv6>`
        .. attribute:: ipv4
        IPV4 related services
        **type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv4>`
        """

        _prefix = 'ip-tcp-cfg'
        _revision = '2016-02-26'

        def __init__(self):
            super(Ip.Cinetd.Services.Vrfs.Vrf, self).__init__()

            self.yang_name = "vrf"
            self.yang_parent_name = "vrfs"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            # vrf-name is the YANG list key.
            self.ylist_key_names = ['vrf_name']
            self._child_container_classes = OrderedDict([("ipv6", ("ipv6", Ip.Cinetd.Services.Vrfs.Vrf.Ipv6)), ("ipv4", ("ipv4", Ip.Cinetd.Services.Vrfs.Vrf.Ipv4))])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('vrf_name', YLeaf(YType.str, 'vrf-name')),
            ])
            self.vrf_name = None
            self.ipv6 = Ip.Cinetd.Services.Vrfs.Vrf.Ipv6()
            self.ipv6.parent = self
            self._children_name_map["ipv6"] = "ipv6"
            self._children_yang_names.add("ipv6")
            self.ipv4 = Ip.Cinetd.Services.Vrfs.Vrf.Ipv4()
            self.ipv4.parent = self
            self._children_name_map["ipv4"] = "ipv4"
            self._children_yang_names.add("ipv4")
            # Segment path embeds the list-key predicate, evaluated lazily so it
            # reflects the current vrf_name value.
            self._segment_path = lambda: "vrf" + "[vrf-name='" + str(self.vrf_name) + "']"
            self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/vrfs/%s" % self._segment_path()

        def __setattr__(self, name, value):
            self._perform_setattr(Ip.Cinetd.Services.Vrfs.Vrf, ['vrf_name'], name, value)

        class Ipv6(Entity):
            """
            IPV6 related services
            .. attribute:: telnet
            TELNET server configuration commands
            **type**\: :py:class:`Telnet <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet>`
            .. attribute:: tftp
            TFTP server configuration commands
            **type**\: :py:class:`Tftp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp>`
            """

            _prefix = 'ip-tcp-cfg'
            _revision = '2016-02-26'

            def __init__(self):
                super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6, self).__init__()

                self.yang_name = "ipv6"
                self.yang_parent_name = "vrf"
                self.is_top_level_class = False
                # Under a keyed list (vrf), so no static absolute path is generated.
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([("telnet", ("telnet", Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet)), ("tftp", ("tftp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp))])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict()
                self.telnet = Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet()
                self.telnet.parent = self
                self._children_name_map["telnet"] = "telnet"
                self._children_yang_names.add("telnet")
                self.tftp = Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp()
                self.tftp.parent = self
                self._children_name_map["tftp"] = "tftp"
                self._children_yang_names.add("tftp")
                self._segment_path = lambda: "ipv6"

            class Telnet(Entity):
                """
                TELNET server configuration commands
                .. attribute:: tcp
                TCP details
                **type**\: :py:class:`Tcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet.Tcp>`
                **presence node**\: True
                """

                _prefix = 'ip-tcp-cfg'
                _revision = '2016-02-26'

                def __init__(self):
                    super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet, self).__init__()

                    self.yang_name = "telnet"
                    self.yang_parent_name = "ipv6"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([("tcp", ("tcp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet.Tcp))])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    # Presence container; None until the user instantiates it.
                    self.tcp = None
                    self._children_name_map["tcp"] = "tcp"
                    self._children_yang_names.add("tcp")
                    self._segment_path = lambda: "telnet"

                class Tcp(Entity):
                    """
                    TCP details
                    .. attribute:: access_list_name
                    Access list
                    **type**\: str
                    .. attribute:: maximum_server
                    Set number of allowable servers
                    **type**\: int
                    **range:** 1..100
                    **mandatory**\: True
                    This class is a :ref:`presence class<presence-class>`
                    """

                    _prefix = 'ip-tcp-cfg'
                    _revision = '2016-02-26'

                    def __init__(self):
                        super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet.Tcp, self).__init__()

                        self.yang_name = "tcp"
                        self.yang_parent_name = "telnet"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([])
                        self.is_presence_container = True
                        self._leafs = OrderedDict([
                            ('access_list_name', YLeaf(YType.str, 'access-list-name')),
                            ('maximum_server', YLeaf(YType.uint32, 'maximum-server')),
                        ])
                        self.access_list_name = None
                        self.maximum_server = None
                        self._segment_path = lambda: "tcp"

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Telnet.Tcp, ['access_list_name', 'maximum_server'], name, value)

            class Tftp(Entity):
                """
                TFTP server configuration commands
                .. attribute:: udp
                UDP details
                **type**\: :py:class:`Udp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp.Udp>`
                **presence node**\: True
                """

                _prefix = 'ip-tcp-cfg'
                _revision = '2016-02-26'

                def __init__(self):
                    super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp, self).__init__()

                    self.yang_name = "tftp"
                    self.yang_parent_name = "ipv6"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([("udp", ("udp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp.Udp))])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    # Presence container; None until the user instantiates it.
                    self.udp = None
                    self._children_name_map["udp"] = "udp"
                    self._children_yang_names.add("udp")
                    self._segment_path = lambda: "tftp"

                class Udp(Entity):
                    """
                    UDP details
                    .. attribute:: access_list_name
                    Access list
                    **type**\: str
                    .. attribute:: maximum_server
                    Set number of allowable servers, 0 for no\-limit
                    **type**\: int
                    **range:** 0..2147483647
                    .. attribute:: home_directory
                    Specify device name where file is read from (e .g. flash\:)
                    **type**\: str
                    **mandatory**\: True
                    .. attribute:: dscp_value
                    Set IP DSCP (DiffServ CodePoint) for TFTP Server Packets
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    This class is a :ref:`presence class<presence-class>`
                    """

                    _prefix = 'ip-tcp-cfg'
                    _revision = '2016-02-26'

                    def __init__(self):
                        super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp.Udp, self).__init__()

                        self.yang_name = "udp"
                        self.yang_parent_name = "tftp"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([])
                        self.is_presence_container = True
                        self._leafs = OrderedDict([
                            ('access_list_name', YLeaf(YType.str, 'access-list-name')),
                            ('maximum_server', YLeaf(YType.uint32, 'maximum-server')),
                            ('home_directory', YLeaf(YType.str, 'home-directory')),
                            ('dscp_value', YLeaf(YType.int32, 'dscp-value')),
                        ])
                        self.access_list_name = None
                        self.maximum_server = None
                        self.home_directory = None
                        self.dscp_value = None
                        self._segment_path = lambda: "udp"

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ip.Cinetd.Services.Vrfs.Vrf.Ipv6.Tftp.Udp, ['access_list_name', 'maximum_server', 'home_directory', 'dscp_value'], name, value)

        class Ipv4(Entity):
            """
            IPV4 related services
            .. attribute:: telnet
            TELNET server configuration commands
            **type**\: :py:class:`Telnet <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet>`
            .. attribute:: tftp
            TFTP server configuration commands
            **type**\: :py:class:`Tftp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp>`
            """

            _prefix = 'ip-tcp-cfg'
            _revision = '2016-02-26'

            def __init__(self):
                super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4, self).__init__()

                self.yang_name = "ipv4"
                self.yang_parent_name = "vrf"
                self.is_top_level_class = False
                # Under a keyed list (vrf), so no static absolute path is generated.
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([("telnet", ("telnet", Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet)), ("tftp", ("tftp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp))])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict()
                self.telnet = Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet()
                self.telnet.parent = self
                self._children_name_map["telnet"] = "telnet"
                self._children_yang_names.add("telnet")
                self.tftp = Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp()
                self.tftp.parent = self
                self._children_name_map["tftp"] = "tftp"
                self._children_yang_names.add("tftp")
                self._segment_path = lambda: "ipv4"

            class Telnet(Entity):
                """
                TELNET server configuration commands
                .. attribute:: tcp
                TCP details
                **type**\: :py:class:`Tcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet.Tcp>`
                **presence node**\: True
                """

                _prefix = 'ip-tcp-cfg'
                _revision = '2016-02-26'

                def __init__(self):
                    super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet, self).__init__()

                    self.yang_name = "telnet"
                    self.yang_parent_name = "ipv4"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([("tcp", ("tcp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet.Tcp))])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    # Presence container; None until the user instantiates it.
                    self.tcp = None
                    self._children_name_map["tcp"] = "tcp"
                    self._children_yang_names.add("tcp")
                    self._segment_path = lambda: "telnet"

                class Tcp(Entity):
                    """
                    TCP details
                    .. attribute:: access_list_name
                    Access list
                    **type**\: str
                    .. attribute:: maximum_server
                    Set number of allowable servers
                    **type**\: int
                    **range:** 1..100
                    **mandatory**\: True
                    This class is a :ref:`presence class<presence-class>`
                    """

                    _prefix = 'ip-tcp-cfg'
                    _revision = '2016-02-26'

                    def __init__(self):
                        super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet.Tcp, self).__init__()

                        self.yang_name = "tcp"
                        self.yang_parent_name = "telnet"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([])
                        self.is_presence_container = True
                        self._leafs = OrderedDict([
                            ('access_list_name', YLeaf(YType.str, 'access-list-name')),
                            ('maximum_server', YLeaf(YType.uint32, 'maximum-server')),
                        ])
                        self.access_list_name = None
                        self.maximum_server = None
                        self._segment_path = lambda: "tcp"

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Telnet.Tcp, ['access_list_name', 'maximum_server'], name, value)

            class Tftp(Entity):
                """
                TFTP server configuration commands
                .. attribute:: udp
                UDP details
                **type**\: :py:class:`Udp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp.Udp>`
                **presence node**\: True
                """

                _prefix = 'ip-tcp-cfg'
                _revision = '2016-02-26'

                def __init__(self):
                    super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp, self).__init__()

                    self.yang_name = "tftp"
                    self.yang_parent_name = "ipv4"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_container_classes = OrderedDict([("udp", ("udp", Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp.Udp))])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict()
                    # Presence container; None until the user instantiates it.
                    self.udp = None
                    self._children_name_map["udp"] = "udp"
                    self._children_yang_names.add("udp")
                    self._segment_path = lambda: "tftp"

                class Udp(Entity):
                    """
                    UDP details
                    .. attribute:: access_list_name
                    Access list
                    **type**\: str
                    .. attribute:: maximum_server
                    Set number of allowable servers, 0 for no\-limit
                    **type**\: int
                    **range:** 0..2147483647
                    .. attribute:: home_directory
                    Specify device name where file is read from (e .g. flash\:)
                    **type**\: str
                    **mandatory**\: True
                    .. attribute:: dscp_value
                    Set IP DSCP (DiffServ CodePoint) for TFTP Server Packets
                    **type**\: int
                    **range:** \-2147483648..2147483647
                    This class is a :ref:`presence class<presence-class>`
                    """

                    _prefix = 'ip-tcp-cfg'
                    _revision = '2016-02-26'

                    def __init__(self):
                        super(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp.Udp, self).__init__()

                        self.yang_name = "udp"
                        self.yang_parent_name = "tftp"
                        self.is_top_level_class = False
                        self.has_list_ancestor = True
                        self.ylist_key_names = []
                        self._child_container_classes = OrderedDict([])
                        self._child_list_classes = OrderedDict([])
                        self.is_presence_container = True
                        self._leafs = OrderedDict([
                            ('access_list_name', YLeaf(YType.str, 'access-list-name')),
                            ('maximum_server', YLeaf(YType.uint32, 'maximum-server')),
                            ('home_directory', YLeaf(YType.str, 'home-directory')),
                            ('dscp_value', YLeaf(YType.int32, 'dscp-value')),
                        ])
                        self.access_list_name = None
                        self.maximum_server = None
                        self.home_directory = None
                        self.dscp_value = None
                        self._segment_path = lambda: "udp"

                    def __setattr__(self, name, value):
                        self._perform_setattr(Ip.Cinetd.Services.Vrfs.Vrf.Ipv4.Tftp.Udp, ['access_list_name', 'maximum_server', 'home_directory', 'dscp_value'], name, value)
class Ipv6(Entity):
"""
IPV6 related services
.. attribute:: small_servers
Describing IPV4 and IPV6 small servers
**type**\: :py:class:`SmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6.SmallServers>`
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
    super(Ip.Cinetd.Services.Ipv6, self).__init__()

    self.yang_name = "ipv6"
    self.yang_parent_name = "services"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("small-servers", ("small_servers", Ip.Cinetd.Services.Ipv6.SmallServers))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Non-presence child container instantiated eagerly and parented here.
    self.small_servers = Ip.Cinetd.Services.Ipv6.SmallServers()
    self.small_servers.parent = self
    self._children_name_map["small_servers"] = "small-servers"
    self._children_yang_names.add("small-servers")
    self._segment_path = lambda: "ipv6"
    self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/%s" % self._segment_path()
class SmallServers(Entity):
"""
Describing IPV4 and IPV6 small servers
.. attribute:: tcp_small_servers
Describing TCP related IPV4 and IPV6 small servers
**type**\: :py:class:`TcpSmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6.SmallServers.TcpSmallServers>`
**presence node**\: True
.. attribute:: udp_small_servers
UDP small servers configuration
**type**\: :py:class:`UdpSmallServers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6.SmallServers.UdpSmallServers>`
**presence node**\: True
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
    super(Ip.Cinetd.Services.Ipv6.SmallServers, self).__init__()

    self.yang_name = "small-servers"
    self.yang_parent_name = "ipv6"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # udp-small-servers is augmented in from Cisco-IOS-XR-ip-udp-cfg,
    # hence its module-qualified YANG name.
    self._child_container_classes = OrderedDict([("tcp-small-servers", ("tcp_small_servers", Ip.Cinetd.Services.Ipv6.SmallServers.TcpSmallServers)), ("Cisco-IOS-XR-ip-udp-cfg:udp-small-servers", ("udp_small_servers", Ip.Cinetd.Services.Ipv6.SmallServers.UdpSmallServers))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    # Presence containers default to None; the user must instantiate them explicitly.
    self.tcp_small_servers = None
    self._children_name_map["tcp_small_servers"] = "tcp-small-servers"
    self._children_yang_names.add("tcp-small-servers")
    self.udp_small_servers = None
    self._children_name_map["udp_small_servers"] = "Cisco-IOS-XR-ip-udp-cfg:udp-small-servers"
    self._children_yang_names.add("Cisco-IOS-XR-ip-udp-cfg:udp-small-servers")
    self._segment_path = lambda: "small-servers"
    self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv6/%s" % self._segment_path()
class TcpSmallServers(Entity):
"""
Describing TCP related IPV4 and IPV6 small
servers
.. attribute:: access_control_list_name
Specify the access list
**type**\: str
.. attribute:: small_server
Set number of allowable TCP small servers, specify 0 for no\-limit
**type**\: union of the below types:
**type**\: :py:class:`SmallServer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6.SmallServers.TcpSmallServers.SmallServer>`
**type**\: int
**range:** 0..2147483647
**mandatory**\: True
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ip-tcp-cfg'
_revision = '2016-02-26'
def __init__(self):
super(Ip.Cinetd.Services.Ipv6.SmallServers.TcpSmallServers, self).__init__()
self.yang_name = "tcp-small-servers"
self.yang_parent_name = "small-servers"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('access_control_list_name', YLeaf(YType.str, 'access-control-list-name')),
('small_server', YLeaf(YType.str, 'small-server')),
])
self.access_control_list_name = None
self.small_server = None
self._segment_path = lambda: "tcp-small-servers"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv6/small-servers/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Ip.Cinetd.Services.Ipv6.SmallServers.TcpSmallServers, ['access_control_list_name', 'small_server'], name, value)
class SmallServer(Enum):
"""
SmallServer (Enum Class)
Set number of allowable TCP small servers,
specify 0 for no\-limit
.. data:: no_limit = 0
Unlimited Servers
"""
no_limit = Enum.YLeaf(0, "no-limit")
class UdpSmallServers(Entity):
"""
UDP small servers configuration
.. attribute:: access_control_list_name
Specify the access list
**type**\: str
.. attribute:: small_server
Set number of allowable small servers, specify 0 for no\-limit
**type**\: union of the below types:
**type**\: :py:class:`SmallServer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.Cinetd.Services.Ipv6.SmallServers.UdpSmallServers.SmallServer>`
**type**\: int
**range:** 0..2147483647
**mandatory**\: True
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ip-udp-cfg'
_revision = '2017-07-31'
def __init__(self):
super(Ip.Cinetd.Services.Ipv6.SmallServers.UdpSmallServers, self).__init__()
self.yang_name = "udp-small-servers"
self.yang_parent_name = "small-servers"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('access_control_list_name', YLeaf(YType.str, 'access-control-list-name')),
('small_server', YLeaf(YType.str, 'small-server')),
])
self.access_control_list_name = None
self.small_server = None
self._segment_path = lambda: "Cisco-IOS-XR-ip-udp-cfg:udp-small-servers"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/cinetd/services/ipv6/small-servers/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Ip.Cinetd.Services.Ipv6.SmallServers.UdpSmallServers, ['access_control_list_name', 'small_server'], name, value)
class SmallServer(Enum):
"""
SmallServer (Enum Class)
Set number of allowable small servers, specify
0 for no\-limit
.. data:: no_limit = 0
Unlimited Servers
"""
no_limit = Enum.YLeaf(0, "no-limit")
# Auto-generated YDK model class. ForwardProtocol is itself an augmentation
# from Cisco-IOS-XR-ip-udp-cfg grafted under the Cisco-IOS-XR-ip-tcp-cfg
# "ip" container — hence the module-qualified segment path below.
class ForwardProtocol(Entity):
"""
Controls forwarding of physical and directed IP
broadcasts
.. attribute:: udp
Packets to a specific UDP port
**type**\: :py:class:`Udp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.ForwardProtocol.Udp>`
"""
_prefix = 'ip-udp-cfg'
_revision = '2017-07-31'
def __init__(self):
super(Ip.ForwardProtocol, self).__init__()
self.yang_name = "forward-protocol"
self.yang_parent_name = "ip"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("udp", ("udp", Ip.ForwardProtocol.Udp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
# Non-presence container: instantiated eagerly and parented here.
self.udp = Ip.ForwardProtocol.Udp()
self.udp.parent = self
self._children_name_map["udp"] = "udp"
self._children_yang_names.add("udp")
self._segment_path = lambda: "Cisco-IOS-XR-ip-udp-cfg:forward-protocol"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/%s" % self._segment_path()
class Udp(Entity):
"""
Packets to a specific UDP port
.. attribute:: ports
Port configuration
**type**\: :py:class:`Ports <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.ForwardProtocol.Udp.Ports>`
.. attribute:: disable
Disable IP Forward Protocol UDP
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'ip-udp-cfg'
_revision = '2017-07-31'
def __init__(self):
super(Ip.ForwardProtocol.Udp, self).__init__()
self.yang_name = "udp"
self.yang_parent_name = "forward-protocol"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("ports", ("ports", Ip.ForwardProtocol.Udp.Ports))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
# YANG "empty" leaf: presence alone means "disabled".
('disable', YLeaf(YType.empty, 'disable')),
])
self.disable = None
self.ports = Ip.ForwardProtocol.Udp.Ports()
self.ports.parent = self
self._children_name_map["ports"] = "ports"
self._children_yang_names.add("ports")
self._segment_path = lambda: "udp"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/Cisco-IOS-XR-ip-udp-cfg:forward-protocol/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Ip.ForwardProtocol.Udp, ['disable'], name, value)
class Ports(Entity):
"""
Port configuration
.. attribute:: port
Well\-known ports are enabled by default and non well\-known ports are disabled by default. It is not allowed to configure the default
**type**\: list of :py:class:`Port <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ip_tcp_cfg.Ip.ForwardProtocol.Udp.Ports.Port>`
"""
_prefix = 'ip-udp-cfg'
_revision = '2017-07-31'
def __init__(self):
super(Ip.ForwardProtocol.Udp.Ports, self).__init__()
self.yang_name = "ports"
self.yang_parent_name = "udp"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("port", ("port", Ip.ForwardProtocol.Udp.Ports.Port))])
self._leafs = OrderedDict()
# YANG list node: YList keeps Port entries keyed by port-id.
self.port = YList(self)
self._segment_path = lambda: "ports"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/Cisco-IOS-XR-ip-udp-cfg:forward-protocol/udp/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Ip.ForwardProtocol.Udp.Ports, [], name, value)
class Port(Entity):
"""
Well\-known ports are enabled by default and
non well\-known ports are disabled by default.
It is not allowed to configure the default.
.. attribute:: port_id (key)
Port number
**type**\: int
**range:** 1..65535
.. attribute:: enable
Specify 'false' to disable well\-known ports Domain (53), TFTP (69), NameServer (42), TACACS (49), NetBiosNameService (137), or NetBiosDatagramService (138). Specify 'true' to enable non well\-known ports
**type**\: bool
**mandatory**\: True
"""
_prefix = 'ip-udp-cfg'
_revision = '2017-07-31'
def __init__(self):
super(Ip.ForwardProtocol.Udp.Ports.Port, self).__init__()
self.yang_name = "port"
self.yang_parent_name = "ports"
self.is_top_level_class = False
self.has_list_ancestor = False
# port_id is the YANG list key; it is embedded in the segment path.
self.ylist_key_names = ['port_id']
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('port_id', YLeaf(YType.uint16, 'port-id')),
('enable', YLeaf(YType.boolean, 'enable')),
])
self.port_id = None
self.enable = None
self._segment_path = lambda: "port" + "[port-id='" + str(self.port_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-ip-tcp-cfg:ip/Cisco-IOS-XR-ip-udp-cfg:forward-protocol/udp/ports/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Ip.ForwardProtocol.Udp.Ports.Port, ['port_id', 'enable'], name, value)
def clone_ptr(self):
    """Create a fresh top-level Ip entity, cache it on this instance, and return it."""
    entity = Ip()
    self._top_entity = entity
    return entity
| 42.361589
| 293
| 0.451038
| 6,245
| 71,464
| 4.870777
| 0.045156
| 0.028141
| 0.035177
| 0.028799
| 0.848872
| 0.810343
| 0.789434
| 0.783056
| 0.759715
| 0.745611
| 0
| 0.018231
| 0.453501
| 71,464
| 1,686
| 294
| 42.386714
| 0.76062
| 0.204565
| 0
| 0.709497
| 0
| 0.013966
| 0.118662
| 0.040802
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071229
| false
| 0
| 0.006983
| 0
| 0.135475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7f6bd8087e33d171126f2aca954e1ce2fac1bd0
| 26,461
|
py
|
Python
|
cloudify_libvirt/tests/test_volume.py
|
0lvin-cfy/cloudify-libvirt-plugin
|
90e77da5bf086dfb7ad20604a25b769badee1bb7
|
[
"Apache-2.0"
] | 2
|
2017-11-29T18:21:19.000Z
|
2020-06-19T03:25:23.000Z
|
cloudify_libvirt/tests/test_volume.py
|
0lvin-cfy/cloudify-libvirt-plugin
|
90e77da5bf086dfb7ad20604a25b769badee1bb7
|
[
"Apache-2.0"
] | 13
|
2018-01-29T16:25:54.000Z
|
2020-07-01T08:03:38.000Z
|
cloudify_libvirt/tests/test_volume.py
|
0lvin-cfy/cloudify-libvirt-plugin
|
90e77da5bf086dfb7ad20604a25b769badee1bb7
|
[
"Apache-2.0"
] | 2
|
2017-11-19T15:04:39.000Z
|
2020-06-19T03:25:25.000Z
|
# Copyright (c) 2016-2019 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from cloudify.state import current_ctx
from cloudify.mocks import MockCloudifyContext
from cloudify.exceptions import NonRecoverableError
from cloudify_common_sdk._compat import builtins_open
from cloudify_libvirt.tests.test_common_base import LibVirtCommonTest
import cloudify_libvirt.volume_tasks as volume_tasks
class TestVolumeTasks(LibVirtCommonTest):
"""Unit tests for cloudify_libvirt.volume_tasks.

Every test patches "cloudify_libvirt.volume_tasks.libvirt.open" so no real
libvirt connection is made; pool/volume objects are mock.Mock instances.
"""
# Build a MockCloudifyContext with node properties pointing at pool
# 'pool_name' and runtime libvirt_auth that overrides the node's.
def _create_ctx(self):
_ctx = MockCloudifyContext(
'node_name',
properties={
'libvirt_auth': {'a': 'c'},
'params': {'pool': 'pool_name'},
},
runtime_properties={
'libvirt_auth': {'a': 'd'}
}
)
current_ctx.set(_ctx)
return _ctx
# Snapshot ops must fail cleanly when libvirt.open returns no connection.
def _test_empty_connection_backup(self, func):
# check correct handle exception with empty connection
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'resource'
self._check_correct_connect(
"cloudify_libvirt.volume_tasks.libvirt.open",
func, [], {'ctx': _ctx, "snapshot_name": "backup"})
# Snapshot ops must fail cleanly when the named volume does not exist.
def _test_empty_volume_backup(self, func):
# check correct handle exception with empty volume
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'resource'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
self._check_no_such_object_volume(
"cloudify_libvirt.volume_tasks.libvirt.open",
func, [], {'ctx': _ctx, "snapshot_name": "backup"}, 'resource')
# Same as above but for ops that take no snapshot_name (start/stop/delete).
def _test_empty_volume(self, func):
# check correct handle exception with empty volume
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'resource'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
self._check_no_such_object_volume(
"cloudify_libvirt.volume_tasks.libvirt.open",
func, [], {'ctx': _ctx}, 'resource')
# Wire up connect -> pool -> volume mocks plus a ctx that already holds
# one snapshot backup ("node_name-backup"); returns all four objects.
def _create_fake_volume_backup(self):
volume = mock.Mock()
volume.XMLDesc = mock.Mock(return_value="<volume/>")
volume.isActive = mock.Mock(return_value=1)
volume.name = mock.Mock(return_value="volume_name")
pool = mock.Mock()
pool.XMLDesc = mock.Mock(return_value="<pool/>")
pool.isActive = mock.Mock(return_value=1)
pool.name = mock.Mock(return_value="pool_name")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'resource'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
_ctx.node.properties['params'] = {}
_ctx.instance.runtime_properties["backups"] = {
"node_name-backup": "<xml/>"}
return _ctx, connect, pool, volume
# snapshot_apply: restore from an in-memory snapshot or an on-disk backup.
def test_snapshot_apply(self):
self._test_no_resource_id(volume_tasks.snapshot_apply,
"No volume for restore")
self._test_no_snapshot_name(
self._create_ctx(),
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.snapshot_apply)
self._test_empty_connection_backup(volume_tasks.snapshot_apply)
self._test_empty_volume_backup(volume_tasks.snapshot_apply)
# no such snapshot
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"No snapshots found with name: node_name-backup!."
):
volume_tasks.snapshot_apply(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=True)
# we have such snapshot
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.snapshot_apply(
ctx=_ctx, snapshot_name="backup",
snapshot_incremental=True)
# no such backup
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=False)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"No backups found with name: node_name-backup!."
):
volume_tasks.snapshot_apply(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=False)
# have backup
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=True)
):
fake_file = mock.mock_open()
fake_file().read.return_value = "<volume/>"
with mock.patch(
builtins_open, fake_file
):
volume_tasks.snapshot_apply(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=False)
fake_file.assert_called_with('./backup!/resource.xml', 'r')
# snapshot_create: store volume XML either in runtime properties
# (incremental) or as a file on disk (full backup).
def test_snapshot_create(self):
self._test_no_resource_id(volume_tasks.snapshot_create,
"No volume for backup")
self._test_no_snapshot_name(
self._create_ctx(),
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.snapshot_create)
self._test_empty_connection_backup(volume_tasks.snapshot_create)
self._test_empty_volume_backup(volume_tasks.snapshot_create)
# check create snapshot with error, already exists
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"Snapshot node_name-backup already exists."
):
volume_tasks.snapshot_create(ctx=_ctx, snapshot_name="backup",
snapshot_incremental=True)
connect.storagePoolLookupByName.assert_called_with('pool_name')
pool.storageVolLookupByName.assert_called_with('resource')
# no such snapshots
_ctx.instance.runtime_properties["backups"] = {}
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.snapshot_create(ctx=_ctx, snapshot_name="backup",
snapshot_incremental=True)
self.assertEqual(
_ctx.instance.runtime_properties["backups"],
{"node_name-backup": "<volume/>"})
# check create snapshot
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"os.path.isdir",
mock.Mock(return_value=True)
):
fake_file = mock.mock_open()
fake_file().read.return_value = "!!!!"
with mock.patch(
builtins_open, fake_file
):
# with error, already exists
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=True)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"Backup node_name-backup already exists."
):
volume_tasks.snapshot_create(
ctx=_ctx, snapshot_name="backup",
snapshot_incremental=False)
# without error
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=False)
):
volume_tasks.snapshot_create(
ctx=_ctx, snapshot_name="backup",
snapshot_incremental=False)
fake_file().write.assert_called_with("<volume/>")
# snapshot_delete: drop the runtime-property snapshot or remove the
# on-disk backup file (os.remove is asserted).
def test_snapshot_delete(self):
self._test_no_resource_id(volume_tasks.snapshot_delete,
"No volume for backup delete")
self._test_no_snapshot_name(
self._create_ctx(),
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.snapshot_delete)
# no such snapshots
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"No snapshots found with name: node_name-backup!."
):
volume_tasks.snapshot_delete(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=True)
self.assertEqual(
_ctx.instance.runtime_properties["backups"],
{'node_name-backup': "<xml/>"})
# remove snapshot
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.snapshot_delete(ctx=_ctx, snapshot_name="backup",
snapshot_incremental=True)
self.assertEqual(_ctx.instance.runtime_properties["backups"], {})
# no such backup
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=False)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"No backups found with name: node_name-backup!."
):
volume_tasks.snapshot_delete(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=False)
# remove backup
_ctx, connect, pool, volume = self._create_fake_volume_backup()
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"os.path.isfile",
mock.Mock(return_value=True)
):
fake_file = mock.mock_open()
fake_file().read.return_value = "!!!!"
remove_mock = mock.Mock()
with mock.patch(
"os.remove",
remove_mock
):
with mock.patch(
builtins_open, fake_file
):
volume_tasks.snapshot_delete(
ctx=_ctx, snapshot_name="backup!",
snapshot_incremental=False)
fake_file.assert_called_with('./backup!/resource.xml', 'r')
remove_mock.assert_called_with('./backup!/resource.xml')
# create: volume definition via pool.createXML, including the optional
# download-size pre-check against a remote image URL.
def test_create(self):
# check correct handle exception with empty connection
self._check_correct_connect(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.create, [], {'ctx': self._create_ctx()})
# check error with create volume image
self._check_create_object(
'Failed to find the pool',
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.create, [], {'ctx': self._create_ctx(),
'params': {'pool': 'empty'}})
# successful create
_ctx = self._create_ctx()
_ctx.get_resource = mock.Mock(return_value='<somexml/>')
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume_name")
pool = mock.Mock()
pool.createXML = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
# without params
_ctx.instance.runtime_properties['params'] = {}
_ctx.node.properties['params'] = {}
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.create(ctx=_ctx,
template_resource="template_resource",
params={'pool': 'empty'})
pool.createXML.assert_called_with('<somexml/>')
self.assertEqual(
_ctx.instance.runtime_properties['resource_id'], "volume_name"
)
# failed check size of download
_ctx.instance.runtime_properties['resource_id'] = None
_ctx.instance.runtime_properties['params'] = {}
_ctx.node.properties['params'] = {}
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
# empty
head_response = mock.Mock()
head_response.headers = {'Content-Length': 0}
with mock.patch(
"cloudify_libvirt.volume_tasks.requests.head",
mock.Mock(return_value=head_response)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"Failed to download volume."
):
volume_tasks.create(
ctx=_ctx,
template_resource="template_resource",
params={
'pool': 'empty',
'url': "https://fake.org/centos.iso"})
# sucessful check size of download
_ctx.instance.runtime_properties['resource_id'] = None
_ctx.instance.runtime_properties['params'] = {}
_ctx.node.properties['params'] = {}
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
head_response = mock.Mock()
head_response.headers = {'Content-Length': 512,
'Accept-Ranges': 'bytes'}
with mock.patch(
"cloudify_libvirt.volume_tasks.requests.head",
mock.Mock(return_value=head_response)
):
volume_tasks.create(
ctx=_ctx,
template_resource="template_resource",
params={
'pool': 'empty',
'url': "https://fake.org/centos.iso"})
# failed on create
_ctx.instance.runtime_properties['resource_id'] = None
_ctx.instance.runtime_properties['params'] = {}
_ctx.node.properties['params'] = {}
pool.createXML = mock.Mock(return_value=None)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with self.assertRaisesRegexp(
NonRecoverableError,
'Failed to create a virtual volume'
):
volume_tasks.create(ctx=_ctx,
template_resource="template_resource",
params={'pool': 'empty'})
# use_external_resource with a missing volume must raise.
def test_reuse_volume_create_not_exist(self):
# check correct handle exception with empty network
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
self._check_no_such_object_volume(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.create, [], {
'ctx': _ctx,
"resource_id": 'resource',
"use_external_resource": True,
}, 'resource')
# use_external_resource with an existing volume adopts it and records
# resource_id/use_external_resource in runtime properties.
def test_reuse_volume_create_exist(self):
# check that we can use network
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume")
pool = mock.Mock()
pool.name = mock.Mock(return_value="pool")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.create(ctx=_ctx,
resource_id='resource',
use_external_resource=True)
connect.storagePoolLookupByName.assert_called_with('pool_name')
pool.storageVolLookupByName.assert_called_with('resource')
self.assertEqual(
_ctx.instance.runtime_properties['resource_id'], 'volume'
)
self.assertTrue(
_ctx.instance.runtime_properties['use_external_resource']
)
# start: shared error-path checks only (happy paths are below).
def test_start(self):
# check correct handle exception with empty connection
self._test_check_correct_connect_action(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.start)
self._test_empty_volume(volume_tasks.start)
self._test_reused_object(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.start)
self._test_no_resource_id(volume_tasks.start)
# start with zero_wipe uploads zeroed data into the volume.
def test_start_wipe(self):
# zero wipe
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'volume'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume")
volume.upload = mock.Mock()
pool = mock.Mock()
pool.name = mock.Mock(return_value="pool")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.start(ctx=_ctx,
params={
'zero_wipe': True,
'allocation': 1
})
# start with a url downloads the image (requests.head size check, then
# requests.get streaming into volume.upload).
def test_start_download(self):
# download
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'volume'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume")
volume.upload = mock.Mock()
pool = mock.Mock()
pool.name = mock.Mock(return_value="pool")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
# empty
head_response = mock.Mock()
head_response.headers = {'Content-Length': 0}
with mock.patch(
"cloudify_libvirt.volume_tasks.requests.head",
mock.Mock(return_value=head_response)
):
with self.assertRaisesRegexp(
NonRecoverableError,
"Failed to download volume."
):
volume_tasks.start(
ctx=_ctx,
params={
'url': "https://fake.org/centos.iso"})
# 512 for download
head_response = mock.Mock()
head_response.headers = {'Content-Length': 512,
'Accept-Ranges': 'bytes'}
head_response.iter_content = mock.Mock(return_value=["\0" * 256])
with mock.patch(
"cloudify_libvirt.volume_tasks.requests.head",
mock.Mock(return_value=head_response)
):
with mock.patch(
"cloudify_libvirt.volume_tasks.requests.get",
mock.Mock(return_value=head_response)
):
volume_tasks.start(
ctx=_ctx,
params={
'url': "https://fake.org/centos.iso"})
# stop: shared error-path checks only.
def test_stop(self):
# check correct handle exception with empty connection
self._test_check_correct_connect_action(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.stop)
self._test_empty_volume(volume_tasks.stop)
self._test_reused_object(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.stop)
self._test_no_resource_id(volume_tasks.stop)
# stop wipes the volume; libvirt errors and non-zero wipe results are
# tolerated (operation still succeeds).
def test_stop_wipe(self):
# failed to wipe/error ignored
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'volume'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume")
volume.wipe = mock.Mock(
side_effect=volume_tasks.libvirt.libvirtError("e"))
pool = mock.Mock()
pool.name = mock.Mock(return_value="pool")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.stop(ctx=_ctx)
# failed to wipe/wrong response
volume.wipe = mock.Mock(return_value=-1)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with mock.patch(
"cloudify_libvirt.volume_tasks.time.sleep",
mock.Mock(return_value=mock.Mock())
):
volume_tasks.stop(ctx=_ctx)
# correctly wiped
volume.wipe = mock.Mock(return_value=0)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.stop(ctx=_ctx)
# delete: non-zero volume.delete() raises; success clears runtime
# properties back to their post-delete baseline.
def test_delete(self):
# check correct handle exception with empty connection
self._test_check_correct_connect_action(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.delete)
self._test_empty_volume(volume_tasks.delete)
self._test_reused_object(
"cloudify_libvirt.volume_tasks.libvirt.open",
volume_tasks.delete)
self._test_no_resource_id(volume_tasks.delete)
# failed to remove
_ctx = self._create_ctx()
_ctx.instance.runtime_properties['resource_id'] = 'volume'
_ctx.instance.runtime_properties['params'] = {'pool': 'pool_name'}
volume = mock.Mock()
volume.name = mock.Mock(return_value="volume")
volume.delete = mock.Mock(return_value=-1)
pool = mock.Mock()
pool.name = mock.Mock(return_value="pool")
pool.storageVolLookupByName = mock.Mock(return_value=volume)
connect = self._create_fake_connection()
connect.storagePoolLookupByName = mock.Mock(return_value=pool)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
with self.assertRaisesRegexp(
NonRecoverableError,
'Can not undefine volume.'
):
volume_tasks.delete(ctx=_ctx)
# sucessful remove
volume.delete = mock.Mock(return_value=0)
with mock.patch(
"cloudify_libvirt.volume_tasks.libvirt.open",
mock.Mock(return_value=connect)
):
volume_tasks.delete(ctx=_ctx)
self.assertEqual(
_ctx.instance.runtime_properties,
{
'backups': {},
'libvirt_auth': {'a': 'd'},
'params': {},
'resource_id': None
}
)
# Allow running this test module directly (outside a pytest/nose runner).
if __name__ == '__main__':
unittest.main()
| 39.850904
| 79
| 0.571483
| 2,608
| 26,461
| 5.504601
| 0.082439
| 0.056283
| 0.072165
| 0.097938
| 0.837072
| 0.818821
| 0.794023
| 0.77445
| 0.756478
| 0.728058
| 0
| 0.001921
| 0.331129
| 26,461
| 663
| 80
| 39.911011
| 0.809198
| 0.060466
| 0
| 0.760073
| 0
| 0
| 0.15704
| 0.078923
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.031136
| false
| 0
| 0.014652
| 0
| 0.051282
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40b0237206a0ab48729933a6440425eaeefc2534
| 106,946
|
py
|
Python
|
sdk/python/pulumi_aws/lb/outputs.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/lb/outputs.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/lb/outputs.py
|
alexbowers/pulumi-aws
|
7dbdb03b1e4f7c0d51d5b5d17233ff4465c3eff5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'ListenerDefaultAction',
'ListenerDefaultActionAuthenticateCognito',
'ListenerDefaultActionAuthenticateOidc',
'ListenerDefaultActionFixedResponse',
'ListenerDefaultActionForward',
'ListenerDefaultActionForwardStickiness',
'ListenerDefaultActionForwardTargetGroup',
'ListenerDefaultActionRedirect',
'ListenerRuleAction',
'ListenerRuleActionAuthenticateCognito',
'ListenerRuleActionAuthenticateOidc',
'ListenerRuleActionFixedResponse',
'ListenerRuleActionForward',
'ListenerRuleActionForwardStickiness',
'ListenerRuleActionForwardTargetGroup',
'ListenerRuleActionRedirect',
'ListenerRuleCondition',
'ListenerRuleConditionHostHeader',
'ListenerRuleConditionHttpHeader',
'ListenerRuleConditionHttpRequestMethod',
'ListenerRuleConditionPathPattern',
'ListenerRuleConditionQueryString',
'ListenerRuleConditionSourceIp',
'LoadBalancerAccessLogs',
'LoadBalancerSubnetMapping',
'TargetGroupHealthCheck',
'TargetGroupStickiness',
'GetListenerDefaultActionResult',
'GetListenerDefaultActionAuthenticateCognitoResult',
'GetListenerDefaultActionAuthenticateOidcResult',
'GetListenerDefaultActionFixedResponseResult',
'GetListenerDefaultActionForwardResult',
'GetListenerDefaultActionForwardStickinessResult',
'GetListenerDefaultActionForwardTargetGroupResult',
'GetListenerDefaultActionRedirectResult',
'GetLoadBalancerAccessLogsResult',
'GetLoadBalancerSubnetMappingResult',
'GetTargetGroupHealthCheckResult',
'GetTargetGroupStickinessResult',
]
@pulumi.output_type
class ListenerDefaultAction(dict):
    """
    Default action configuration block of a Load Balancer Listener.

    Backed by a dict; values are intended to be read through the snake_case
    property getters below. Accessing entries with the provider's camelCase
    key names still goes through the plain dict interface, but emits a
    warning suggesting the snake_case property instead.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "authenticateCognito":
            suggest = "authenticate_cognito"
        elif key == "authenticateOidc":
            suggest = "authenticate_oidc"
        elif key == "fixedResponse":
            suggest = "fixed_response"
        elif key == "targetGroupArn":
            suggest = "target_group_arn"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultAction. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultAction.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultAction.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 type: str,
                 authenticate_cognito: Optional['outputs.ListenerDefaultActionAuthenticateCognito'] = None,
                 authenticate_oidc: Optional['outputs.ListenerDefaultActionAuthenticateOidc'] = None,
                 fixed_response: Optional['outputs.ListenerDefaultActionFixedResponse'] = None,
                 forward: Optional['outputs.ListenerDefaultActionForward'] = None,
                 order: Optional[int] = None,
                 redirect: Optional['outputs.ListenerDefaultActionRedirect'] = None,
                 target_group_arn: Optional[str] = None):
        """
        :param str type: Type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
        :param 'ListenerDefaultActionAuthenticateCognitoArgs' authenticate_cognito: Configuration block for using Amazon Cognito to authenticate users. Specify only when `type` is `authenticate-cognito`. Detailed below.
        :param 'ListenerDefaultActionAuthenticateOidcArgs' authenticate_oidc: Configuration block for an identity provider that is compliant with OpenID Connect (OIDC). Specify only when `type` is `authenticate-oidc`. Detailed below.
        :param 'ListenerDefaultActionFixedResponseArgs' fixed_response: Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
        :param 'ListenerDefaultActionForwardArgs' forward: Configuration block for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both `forward` block and `target_group_arn` attribute, you can specify only one target group using `forward` and it must be the same target group specified in `target_group_arn`. Detailed below.
        :param int order: Order for the action. This value is required for rules with multiple actions. The action with the lowest value for order is performed first. Valid values are between `1` and `50000`.
        :param 'ListenerDefaultActionRedirectArgs' redirect: Configuration block for creating a redirect action. Required if `type` is `redirect`. Detailed below.
        :param str target_group_arn: ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
        """
        # Only `type` is mandatory; every optional field is stored only when
        # explicitly provided, so absent keys stay absent from the dict.
        pulumi.set(__self__, "type", type)
        if authenticate_cognito is not None:
            pulumi.set(__self__, "authenticate_cognito", authenticate_cognito)
        if authenticate_oidc is not None:
            pulumi.set(__self__, "authenticate_oidc", authenticate_oidc)
        if fixed_response is not None:
            pulumi.set(__self__, "fixed_response", fixed_response)
        if forward is not None:
            pulumi.set(__self__, "forward", forward)
        if order is not None:
            pulumi.set(__self__, "order", order)
        if redirect is not None:
            pulumi.set(__self__, "redirect", redirect)
        if target_group_arn is not None:
            pulumi.set(__self__, "target_group_arn", target_group_arn)
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="authenticateCognito")
    def authenticate_cognito(self) -> Optional['outputs.ListenerDefaultActionAuthenticateCognito']:
        """
        Configuration block for using Amazon Cognito to authenticate users. Specify only when `type` is `authenticate-cognito`. Detailed below.
        """
        return pulumi.get(self, "authenticate_cognito")
    @property
    @pulumi.getter(name="authenticateOidc")
    def authenticate_oidc(self) -> Optional['outputs.ListenerDefaultActionAuthenticateOidc']:
        """
        Configuration block for an identity provider that is compliant with OpenID Connect (OIDC). Specify only when `type` is `authenticate-oidc`. Detailed below.
        """
        return pulumi.get(self, "authenticate_oidc")
    @property
    @pulumi.getter(name="fixedResponse")
    def fixed_response(self) -> Optional['outputs.ListenerDefaultActionFixedResponse']:
        """
        Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
        """
        return pulumi.get(self, "fixed_response")
    @property
    @pulumi.getter
    def forward(self) -> Optional['outputs.ListenerDefaultActionForward']:
        """
        Configuration block for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both `forward` block and `target_group_arn` attribute, you can specify only one target group using `forward` and it must be the same target group specified in `target_group_arn`. Detailed below.
        """
        return pulumi.get(self, "forward")
    @property
    @pulumi.getter
    def order(self) -> Optional[int]:
        """
        Order for the action. This value is required for rules with multiple actions. The action with the lowest value for order is performed first. Valid values are between `1` and `50000`.
        """
        return pulumi.get(self, "order")
    @property
    @pulumi.getter
    def redirect(self) -> Optional['outputs.ListenerDefaultActionRedirect']:
        """
        Configuration block for creating a redirect action. Required if `type` is `redirect`. Detailed below.
        """
        return pulumi.get(self, "redirect")
    @property
    @pulumi.getter(name="targetGroupArn")
    def target_group_arn(self) -> Optional[str]:
        """
        ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
        """
        return pulumi.get(self, "target_group_arn")
@pulumi.output_type
class ListenerDefaultActionAuthenticateCognito(dict):
    """
    Amazon Cognito authentication settings for a Listener default action
    (`type = "authenticate-cognito"`).

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "userPoolArn":
            suggest = "user_pool_arn"
        elif key == "userPoolClientId":
            suggest = "user_pool_client_id"
        elif key == "userPoolDomain":
            suggest = "user_pool_domain"
        elif key == "authenticationRequestExtraParams":
            suggest = "authentication_request_extra_params"
        elif key == "onUnauthenticatedRequest":
            suggest = "on_unauthenticated_request"
        elif key == "sessionCookieName":
            suggest = "session_cookie_name"
        elif key == "sessionTimeout":
            suggest = "session_timeout"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionAuthenticateCognito. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultActionAuthenticateCognito.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultActionAuthenticateCognito.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 user_pool_arn: str,
                 user_pool_client_id: str,
                 user_pool_domain: str,
                 authentication_request_extra_params: Optional[Mapping[str, str]] = None,
                 on_unauthenticated_request: Optional[str] = None,
                 scope: Optional[str] = None,
                 session_cookie_name: Optional[str] = None,
                 session_timeout: Optional[int] = None):
        """
        :param str user_pool_arn: ARN of the Cognito user pool.
        :param str user_pool_client_id: ID of the Cognito user pool client.
        :param str user_pool_domain: Domain prefix or fully-qualified domain name of the Cognito user pool.
        :param Mapping[str, str] authentication_request_extra_params: Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        :param str on_unauthenticated_request: Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        :param str scope: Set of user claims to be requested from the IdP.
        :param str session_cookie_name: Name of the cookie used to maintain session information.
        :param int session_timeout: Maximum duration of the authentication session, in seconds.
        """
        # The three user-pool fields are mandatory; optional fields are only
        # stored when explicitly provided.
        pulumi.set(__self__, "user_pool_arn", user_pool_arn)
        pulumi.set(__self__, "user_pool_client_id", user_pool_client_id)
        pulumi.set(__self__, "user_pool_domain", user_pool_domain)
        if authentication_request_extra_params is not None:
            pulumi.set(__self__, "authentication_request_extra_params", authentication_request_extra_params)
        if on_unauthenticated_request is not None:
            pulumi.set(__self__, "on_unauthenticated_request", on_unauthenticated_request)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)
        if session_cookie_name is not None:
            pulumi.set(__self__, "session_cookie_name", session_cookie_name)
        if session_timeout is not None:
            pulumi.set(__self__, "session_timeout", session_timeout)
    @property
    @pulumi.getter(name="userPoolArn")
    def user_pool_arn(self) -> str:
        """
        ARN of the Cognito user pool.
        """
        return pulumi.get(self, "user_pool_arn")
    @property
    @pulumi.getter(name="userPoolClientId")
    def user_pool_client_id(self) -> str:
        """
        ID of the Cognito user pool client.
        """
        return pulumi.get(self, "user_pool_client_id")
    @property
    @pulumi.getter(name="userPoolDomain")
    def user_pool_domain(self) -> str:
        """
        Domain prefix or fully-qualified domain name of the Cognito user pool.
        """
        return pulumi.get(self, "user_pool_domain")
    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Optional[Mapping[str, str]]:
        """
        Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        """
        return pulumi.get(self, "authentication_request_extra_params")
    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> Optional[str]:
        """
        Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        """
        return pulumi.get(self, "on_unauthenticated_request")
    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        Set of user claims to be requested from the IdP.
        """
        return pulumi.get(self, "scope")
    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> Optional[str]:
        """
        Name of the cookie used to maintain session information.
        """
        return pulumi.get(self, "session_cookie_name")
    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> Optional[int]:
        """
        Maximum duration of the authentication session, in seconds.
        """
        return pulumi.get(self, "session_timeout")
@pulumi.output_type
class ListenerDefaultActionAuthenticateOidc(dict):
    """
    OpenID Connect (OIDC) authentication settings for a Listener default
    action (`type = "authenticate-oidc"`).

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "authorizationEndpoint":
            suggest = "authorization_endpoint"
        elif key == "clientId":
            suggest = "client_id"
        elif key == "clientSecret":
            suggest = "client_secret"
        elif key == "tokenEndpoint":
            suggest = "token_endpoint"
        elif key == "userInfoEndpoint":
            suggest = "user_info_endpoint"
        elif key == "authenticationRequestExtraParams":
            suggest = "authentication_request_extra_params"
        elif key == "onUnauthenticatedRequest":
            suggest = "on_unauthenticated_request"
        elif key == "sessionCookieName":
            suggest = "session_cookie_name"
        elif key == "sessionTimeout":
            suggest = "session_timeout"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionAuthenticateOidc. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultActionAuthenticateOidc.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultActionAuthenticateOidc.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 authorization_endpoint: str,
                 client_id: str,
                 client_secret: str,
                 issuer: str,
                 token_endpoint: str,
                 user_info_endpoint: str,
                 authentication_request_extra_params: Optional[Mapping[str, str]] = None,
                 on_unauthenticated_request: Optional[str] = None,
                 scope: Optional[str] = None,
                 session_cookie_name: Optional[str] = None,
                 session_timeout: Optional[int] = None):
        """
        :param str authorization_endpoint: Authorization endpoint of the IdP.
        :param str client_id: OAuth 2.0 client identifier.
        :param str client_secret: OAuth 2.0 client secret.
        :param str issuer: OIDC issuer identifier of the IdP.
        :param str token_endpoint: Token endpoint of the IdP.
        :param str user_info_endpoint: User info endpoint of the IdP.
        :param Mapping[str, str] authentication_request_extra_params: Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        :param str on_unauthenticated_request: Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        :param str scope: Set of user claims to be requested from the IdP.
        :param str session_cookie_name: Name of the cookie used to maintain session information.
        :param int session_timeout: Maximum duration of the authentication session, in seconds.
        """
        # The endpoint/client/issuer fields are mandatory; optional session
        # settings are only stored when explicitly provided.
        pulumi.set(__self__, "authorization_endpoint", authorization_endpoint)
        pulumi.set(__self__, "client_id", client_id)
        pulumi.set(__self__, "client_secret", client_secret)
        pulumi.set(__self__, "issuer", issuer)
        pulumi.set(__self__, "token_endpoint", token_endpoint)
        pulumi.set(__self__, "user_info_endpoint", user_info_endpoint)
        if authentication_request_extra_params is not None:
            pulumi.set(__self__, "authentication_request_extra_params", authentication_request_extra_params)
        if on_unauthenticated_request is not None:
            pulumi.set(__self__, "on_unauthenticated_request", on_unauthenticated_request)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)
        if session_cookie_name is not None:
            pulumi.set(__self__, "session_cookie_name", session_cookie_name)
        if session_timeout is not None:
            pulumi.set(__self__, "session_timeout", session_timeout)
    @property
    @pulumi.getter(name="authorizationEndpoint")
    def authorization_endpoint(self) -> str:
        """
        Authorization endpoint of the IdP.
        """
        return pulumi.get(self, "authorization_endpoint")
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> str:
        """
        OAuth 2.0 client identifier.
        """
        return pulumi.get(self, "client_id")
    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> str:
        """
        OAuth 2.0 client secret.
        """
        return pulumi.get(self, "client_secret")
    @property
    @pulumi.getter
    def issuer(self) -> str:
        """
        OIDC issuer identifier of the IdP.
        """
        return pulumi.get(self, "issuer")
    @property
    @pulumi.getter(name="tokenEndpoint")
    def token_endpoint(self) -> str:
        """
        Token endpoint of the IdP.
        """
        return pulumi.get(self, "token_endpoint")
    @property
    @pulumi.getter(name="userInfoEndpoint")
    def user_info_endpoint(self) -> str:
        """
        User info endpoint of the IdP.
        """
        return pulumi.get(self, "user_info_endpoint")
    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Optional[Mapping[str, str]]:
        """
        Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        """
        return pulumi.get(self, "authentication_request_extra_params")
    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> Optional[str]:
        """
        Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        """
        return pulumi.get(self, "on_unauthenticated_request")
    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        Set of user claims to be requested from the IdP.
        """
        return pulumi.get(self, "scope")
    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> Optional[str]:
        """
        Name of the cookie used to maintain session information.
        """
        return pulumi.get(self, "session_cookie_name")
    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> Optional[int]:
        """
        Maximum duration of the authentication session, in seconds.
        """
        return pulumi.get(self, "session_timeout")
@pulumi.output_type
class ListenerDefaultActionFixedResponse(dict):
    """
    Fixed-response settings for a Listener default action
    (`type = "fixed-response"`).

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "contentType":
            suggest = "content_type"
        elif key == "messageBody":
            suggest = "message_body"
        elif key == "statusCode":
            suggest = "status_code"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionFixedResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultActionFixedResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultActionFixedResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 content_type: str,
                 message_body: Optional[str] = None,
                 status_code: Optional[str] = None):
        """
        :param str content_type: Content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
        :param str message_body: Message body.
        :param str status_code: HTTP response code to return. Valid values are `2XX`, `4XX`, or `5XX`.
        """
        pulumi.set(__self__, "content_type", content_type)
        if message_body is not None:
            pulumi.set(__self__, "message_body", message_body)
        if status_code is not None:
            pulumi.set(__self__, "status_code", status_code)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> str:
        """
        Content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
        """
        return pulumi.get(self, "content_type")
    @property
    @pulumi.getter(name="messageBody")
    def message_body(self) -> Optional[str]:
        """
        Message body.
        """
        return pulumi.get(self, "message_body")
    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> Optional[str]:
        """
        HTTP response code to return. Valid values are `2XX`, `4XX`, or `5XX`.
        (The original text described a redirect code; that belongs to the
        Redirect action, not a fixed response.)
        """
        return pulumi.get(self, "status_code")
@pulumi.output_type
class ListenerDefaultActionForward(dict):
    """
    Forward settings for a Listener default action (`type = "forward"`):
    one to five weighted target groups plus optional stickiness.

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "targetGroups":
            suggest = "target_groups"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionForward. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultActionForward.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultActionForward.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 target_groups: Sequence['outputs.ListenerDefaultActionForwardTargetGroup'],
                 stickiness: Optional['outputs.ListenerDefaultActionForwardStickiness'] = None):
        """
        :param Sequence['ListenerDefaultActionForwardTargetGroupArgs'] target_groups: Set of 1-5 target group blocks. Detailed below.
        :param 'ListenerDefaultActionForwardStickinessArgs' stickiness: Configuration block for target group stickiness for the rule. Detailed below.
        """
        pulumi.set(__self__, "target_groups", target_groups)
        if stickiness is not None:
            pulumi.set(__self__, "stickiness", stickiness)
    @property
    @pulumi.getter(name="targetGroups")
    def target_groups(self) -> Sequence['outputs.ListenerDefaultActionForwardTargetGroup']:
        """
        Set of 1-5 target group blocks. Detailed below.
        """
        return pulumi.get(self, "target_groups")
    @property
    @pulumi.getter
    def stickiness(self) -> Optional['outputs.ListenerDefaultActionForwardStickiness']:
        """
        Configuration block for target group stickiness for the rule. Detailed below.
        """
        return pulumi.get(self, "stickiness")
@pulumi.output_type
class ListenerDefaultActionForwardStickiness(dict):
    """
    Target-group stickiness settings inside a forward default action.

    No camelCase key-warning shim is needed here: both field names
    (`duration`, `enabled`) are single words, identical in both conventions.
    """
    def __init__(__self__, *,
                 duration: int,
                 enabled: Optional[bool] = None):
        """
        :param int duration: Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
        :param bool enabled: Whether target group stickiness is enabled. Default is `false`.
        """
        pulumi.set(__self__, "duration", duration)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter
    def duration(self) -> int:
        """
        Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
        """
        return pulumi.get(self, "duration")
    @property
    @pulumi.getter
    def enabled(self) -> Optional[bool]:
        """
        Whether target group stickiness is enabled. Default is `false`.
        """
        return pulumi.get(self, "enabled")
@pulumi.output_type
class ListenerDefaultActionForwardTargetGroup(dict):
    """
    A single weighted target group inside a forward default action.

    No camelCase key-warning shim is needed here: both field names
    (`arn`, `weight`) are single words, identical in both conventions.
    """
    def __init__(__self__, *,
                 arn: str,
                 weight: Optional[int] = None):
        """
        :param str arn: ARN of the target group.
        :param int weight: Weight. The range is 0 to 999.
        """
        pulumi.set(__self__, "arn", arn)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)
    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        ARN of the target group.
        """
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter
    def weight(self) -> Optional[int]:
        """
        Weight. The range is 0 to 999.
        """
        return pulumi.get(self, "weight")
@pulumi.output_type
class ListenerDefaultActionRedirect(dict):
    """
    Redirect settings for a Listener default action (`type = "redirect"`).
    URI components default to the `#{...}` placeholders that preserve the
    corresponding part of the original request.

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "statusCode":
            suggest = "status_code"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionRedirect. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerDefaultActionRedirect.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerDefaultActionRedirect.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 status_code: str,
                 host: Optional[str] = None,
                 path: Optional[str] = None,
                 port: Optional[str] = None,
                 protocol: Optional[str] = None,
                 query: Optional[str] = None):
        """
        :param str status_code: HTTP redirect code. The redirect is either permanent (`HTTP_301`) or temporary (`HTTP_302`).
        :param str host: Hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
        :param str path: Absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
        :param str port: Port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
        :param str protocol: Protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
        :param str query: Query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
        """
        # Only `status_code` is mandatory; URI components are only stored
        # when explicitly provided.
        pulumi.set(__self__, "status_code", status_code)
        if host is not None:
            pulumi.set(__self__, "host", host)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)
        if query is not None:
            pulumi.set(__self__, "query", query)
    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> str:
        """
        HTTP redirect code. The redirect is either permanent (`HTTP_301`) or temporary (`HTTP_302`).
        """
        return pulumi.get(self, "status_code")
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        Hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        Absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
        """
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def port(self) -> Optional[str]:
        """
        Port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """
        Protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
        """
        return pulumi.get(self, "protocol")
    @property
    @pulumi.getter
    def query(self) -> Optional[str]:
        """
        Query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class ListenerRuleAction(dict):
    """
    Action block of a Load Balancer Listener Rule.

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "authenticateCognito":
            suggest = "authenticate_cognito"
        elif key == "authenticateOidc":
            suggest = "authenticate_oidc"
        elif key == "fixedResponse":
            suggest = "fixed_response"
        elif key == "targetGroupArn":
            suggest = "target_group_arn"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleAction. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerRuleAction.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerRuleAction.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 type: str,
                 authenticate_cognito: Optional['outputs.ListenerRuleActionAuthenticateCognito'] = None,
                 authenticate_oidc: Optional['outputs.ListenerRuleActionAuthenticateOidc'] = None,
                 fixed_response: Optional['outputs.ListenerRuleActionFixedResponse'] = None,
                 forward: Optional['outputs.ListenerRuleActionForward'] = None,
                 order: Optional[int] = None,
                 redirect: Optional['outputs.ListenerRuleActionRedirect'] = None,
                 target_group_arn: Optional[str] = None):
        """
        :param str type: The type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
        :param 'ListenerRuleActionAuthenticateCognitoArgs' authenticate_cognito: Information for creating an authenticate action using Cognito. Required if `type` is `authenticate-cognito`.
        :param 'ListenerRuleActionAuthenticateOidcArgs' authenticate_oidc: Information for creating an authenticate action using OIDC. Required if `type` is `authenticate-oidc`.
        :param 'ListenerRuleActionFixedResponseArgs' fixed_response: Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
        :param 'ListenerRuleActionForwardArgs' forward: Information for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both `forward` block and `target_group_arn` attribute, you can specify only one target group using `forward` and it must be the same target group specified in `target_group_arn`.
        :param int order: Order for the action. The action with the lowest value for order is performed first (matches the `order` field documented on `ListenerDefaultAction`).
        :param 'ListenerRuleActionRedirectArgs' redirect: Information for creating a redirect action. Required if `type` is `redirect`.
        :param str target_group_arn: The ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
        """
        # Only `type` is mandatory; optional fields are only stored when
        # explicitly provided.
        pulumi.set(__self__, "type", type)
        if authenticate_cognito is not None:
            pulumi.set(__self__, "authenticate_cognito", authenticate_cognito)
        if authenticate_oidc is not None:
            pulumi.set(__self__, "authenticate_oidc", authenticate_oidc)
        if fixed_response is not None:
            pulumi.set(__self__, "fixed_response", fixed_response)
        if forward is not None:
            pulumi.set(__self__, "forward", forward)
        if order is not None:
            pulumi.set(__self__, "order", order)
        if redirect is not None:
            pulumi.set(__self__, "redirect", redirect)
        if target_group_arn is not None:
            pulumi.set(__self__, "target_group_arn", target_group_arn)
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="authenticateCognito")
    def authenticate_cognito(self) -> Optional['outputs.ListenerRuleActionAuthenticateCognito']:
        """
        Information for creating an authenticate action using Cognito. Required if `type` is `authenticate-cognito`.
        """
        return pulumi.get(self, "authenticate_cognito")
    @property
    @pulumi.getter(name="authenticateOidc")
    def authenticate_oidc(self) -> Optional['outputs.ListenerRuleActionAuthenticateOidc']:
        """
        Information for creating an authenticate action using OIDC. Required if `type` is `authenticate-oidc`.
        """
        return pulumi.get(self, "authenticate_oidc")
    @property
    @pulumi.getter(name="fixedResponse")
    def fixed_response(self) -> Optional['outputs.ListenerRuleActionFixedResponse']:
        """
        Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
        """
        return pulumi.get(self, "fixed_response")
    @property
    @pulumi.getter
    def forward(self) -> Optional['outputs.ListenerRuleActionForward']:
        """
        Information for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both `forward` block and `target_group_arn` attribute, you can specify only one target group using `forward` and it must be the same target group specified in `target_group_arn`.
        """
        return pulumi.get(self, "forward")
    @property
    @pulumi.getter
    def order(self) -> Optional[int]:
        """
        Order for the action. The action with the lowest value for order is
        performed first (matches the `order` field documented on
        `ListenerDefaultAction`).
        """
        return pulumi.get(self, "order")
    @property
    @pulumi.getter
    def redirect(self) -> Optional['outputs.ListenerRuleActionRedirect']:
        """
        Information for creating a redirect action. Required if `type` is `redirect`.
        """
        return pulumi.get(self, "redirect")
    @property
    @pulumi.getter(name="targetGroupArn")
    def target_group_arn(self) -> Optional[str]:
        """
        The ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
        """
        return pulumi.get(self, "target_group_arn")
@pulumi.output_type
class ListenerRuleActionAuthenticateCognito(dict):
    """
    Amazon Cognito authentication settings for a Listener Rule action
    (`type = "authenticate-cognito"`).

    Dict-backed output type; read values via the snake_case property getters.
    camelCase key access works but logs a warning pointing to the property.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map known camelCase keys to their snake_case property equivalents
        # and warn the caller to use the property getter instead.
        suggest = None
        if key == "userPoolArn":
            suggest = "user_pool_arn"
        elif key == "userPoolClientId":
            suggest = "user_pool_client_id"
        elif key == "userPoolDomain":
            suggest = "user_pool_domain"
        elif key == "authenticationRequestExtraParams":
            suggest = "authentication_request_extra_params"
        elif key == "onUnauthenticatedRequest":
            suggest = "on_unauthenticated_request"
        elif key == "sessionCookieName":
            suggest = "session_cookie_name"
        elif key == "sessionTimeout":
            suggest = "session_timeout"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleActionAuthenticateCognito. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        # Warn on camelCase access, then defer to the normal dict lookup.
        ListenerRuleActionAuthenticateCognito.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        # Warn on camelCase access, then defer to dict.get.
        ListenerRuleActionAuthenticateCognito.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 user_pool_arn: str,
                 user_pool_client_id: str,
                 user_pool_domain: str,
                 authentication_request_extra_params: Optional[Mapping[str, str]] = None,
                 on_unauthenticated_request: Optional[str] = None,
                 scope: Optional[str] = None,
                 session_cookie_name: Optional[str] = None,
                 session_timeout: Optional[int] = None):
        """
        :param str user_pool_arn: The ARN of the Cognito user pool.
        :param str user_pool_client_id: The ID of the Cognito user pool client.
        :param str user_pool_domain: The domain prefix or fully-qualified domain name of the Cognito user pool.
        :param Mapping[str, str] authentication_request_extra_params: The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        :param str on_unauthenticated_request: The behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        :param str scope: The set of user claims to be requested from the IdP.
        :param str session_cookie_name: The name of the cookie used to maintain session information.
        :param int session_timeout: The maximum duration of the authentication session, in seconds.
        """
        # The three user-pool fields are mandatory; optional fields are only
        # stored when explicitly provided.
        pulumi.set(__self__, "user_pool_arn", user_pool_arn)
        pulumi.set(__self__, "user_pool_client_id", user_pool_client_id)
        pulumi.set(__self__, "user_pool_domain", user_pool_domain)
        if authentication_request_extra_params is not None:
            pulumi.set(__self__, "authentication_request_extra_params", authentication_request_extra_params)
        if on_unauthenticated_request is not None:
            pulumi.set(__self__, "on_unauthenticated_request", on_unauthenticated_request)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)
        if session_cookie_name is not None:
            pulumi.set(__self__, "session_cookie_name", session_cookie_name)
        if session_timeout is not None:
            pulumi.set(__self__, "session_timeout", session_timeout)
    @property
    @pulumi.getter(name="userPoolArn")
    def user_pool_arn(self) -> str:
        """
        The ARN of the Cognito user pool.
        """
        return pulumi.get(self, "user_pool_arn")
    @property
    @pulumi.getter(name="userPoolClientId")
    def user_pool_client_id(self) -> str:
        """
        The ID of the Cognito user pool client.
        """
        return pulumi.get(self, "user_pool_client_id")
    @property
    @pulumi.getter(name="userPoolDomain")
    def user_pool_domain(self) -> str:
        """
        The domain prefix or fully-qualified domain name of the Cognito user pool.
        """
        return pulumi.get(self, "user_pool_domain")
    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Optional[Mapping[str, str]]:
        """
        The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        """
        return pulumi.get(self, "authentication_request_extra_params")
    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> Optional[str]:
        """
        The behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        """
        return pulumi.get(self, "on_unauthenticated_request")
    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        The set of user claims to be requested from the IdP.
        """
        return pulumi.get(self, "scope")
    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> Optional[str]:
        """
        The name of the cookie used to maintain session information.
        """
        return pulumi.get(self, "session_cookie_name")
    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> Optional[int]:
        """
        The maximum duration of the authentication session, in seconds.
        """
        return pulumi.get(self, "session_timeout")
@pulumi.output_type
class ListenerRuleActionAuthenticateOidc(dict):
    """Output type for an `authenticate-oidc` action on a listener rule."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters that
        # callers should use instead of raw dict access.
        suggest = {
            "authorizationEndpoint": "authorization_endpoint",
            "clientId": "client_id",
            "clientSecret": "client_secret",
            "tokenEndpoint": "token_endpoint",
            "userInfoEndpoint": "user_info_endpoint",
            "authenticationRequestExtraParams": "authentication_request_extra_params",
            "onUnauthenticatedRequest": "on_unauthenticated_request",
            "sessionCookieName": "session_cookie_name",
            "sessionTimeout": "session_timeout",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleActionAuthenticateOidc. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleActionAuthenticateOidc.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleActionAuthenticateOidc.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 authorization_endpoint: str,
                 client_id: str,
                 client_secret: str,
                 issuer: str,
                 token_endpoint: str,
                 user_info_endpoint: str,
                 authentication_request_extra_params: Optional[Mapping[str, str]] = None,
                 on_unauthenticated_request: Optional[str] = None,
                 scope: Optional[str] = None,
                 session_cookie_name: Optional[str] = None,
                 session_timeout: Optional[int] = None):
        """
        :param str authorization_endpoint: The authorization endpoint of the IdP.
        :param str client_id: The OAuth 2.0 client identifier.
        :param str client_secret: The OAuth 2.0 client secret.
        :param str issuer: The OIDC issuer identifier of the IdP.
        :param str token_endpoint: The token endpoint of the IdP.
        :param str user_info_endpoint: The user info endpoint of the IdP.
        :param Mapping[str, str] authentication_request_extra_params: Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
        :param str on_unauthenticated_request: Behavior when the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        :param str scope: The set of user claims to be requested from the IdP.
        :param str session_cookie_name: Name of the cookie used to maintain session information.
        :param int session_timeout: Maximum duration of the authentication session, in seconds.
        """
        pulumi.set(__self__, "authorization_endpoint", authorization_endpoint)
        pulumi.set(__self__, "client_id", client_id)
        pulumi.set(__self__, "client_secret", client_secret)
        pulumi.set(__self__, "issuer", issuer)
        pulumi.set(__self__, "token_endpoint", token_endpoint)
        pulumi.set(__self__, "user_info_endpoint", user_info_endpoint)
        # Optional fields are stored only when explicitly supplied.
        for attr, value in (
                ("authentication_request_extra_params", authentication_request_extra_params),
                ("on_unauthenticated_request", on_unauthenticated_request),
                ("scope", scope),
                ("session_cookie_name", session_cookie_name),
                ("session_timeout", session_timeout)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="authorizationEndpoint")
    def authorization_endpoint(self) -> str:
        """
        The IdP's authorization endpoint.
        """
        return pulumi.get(self, "authorization_endpoint")

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> str:
        """
        The OAuth 2.0 client identifier.
        """
        return pulumi.get(self, "client_id")

    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> str:
        """
        The OAuth 2.0 client secret.
        """
        return pulumi.get(self, "client_secret")

    @property
    @pulumi.getter
    def issuer(self) -> str:
        """
        The IdP's OIDC issuer identifier.
        """
        return pulumi.get(self, "issuer")

    @property
    @pulumi.getter(name="tokenEndpoint")
    def token_endpoint(self) -> str:
        """
        The IdP's token endpoint.
        """
        return pulumi.get(self, "token_endpoint")

    @property
    @pulumi.getter(name="userInfoEndpoint")
    def user_info_endpoint(self) -> str:
        """
        The IdP's user info endpoint.
        """
        return pulumi.get(self, "user_info_endpoint")

    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Optional[Mapping[str, str]]:
        """
        Query parameters included in the redirect request to the authorization endpoint. Max: 10.
        """
        return pulumi.get(self, "authentication_request_extra_params")

    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> Optional[str]:
        """
        Behavior when the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`
        """
        return pulumi.get(self, "on_unauthenticated_request")

    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        The set of user claims requested from the IdP.
        """
        return pulumi.get(self, "scope")

    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> Optional[str]:
        """
        Name of the cookie used to maintain session information.
        """
        return pulumi.get(self, "session_cookie_name")

    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> Optional[int]:
        """
        Maximum duration of the authentication session, in seconds.
        """
        return pulumi.get(self, "session_timeout")
@pulumi.output_type
class ListenerRuleActionFixedResponse(dict):
    """Output type for a `fixed-response` action on a listener rule."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters.
        suggest = {
            "contentType": "content_type",
            "messageBody": "message_body",
            "statusCode": "status_code",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleActionFixedResponse. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleActionFixedResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleActionFixedResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 content_type: str,
                 message_body: Optional[str] = None,
                 status_code: Optional[str] = None):
        """
        :param str content_type: The content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
        :param str message_body: The message body.
        :param str status_code: The HTTP response code. Valid values are `2XX`, `4XX`, or `5XX`.
        """
        pulumi.set(__self__, "content_type", content_type)
        # Optional fields are stored only when explicitly supplied.
        for attr, value in (("message_body", message_body),
                            ("status_code", status_code)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> str:
        """
        The response content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
        """
        return pulumi.get(self, "content_type")

    @property
    @pulumi.getter(name="messageBody")
    def message_body(self) -> Optional[str]:
        """
        The response message body.
        """
        return pulumi.get(self, "message_body")

    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> Optional[str]:
        """
        The HTTP response code. Valid values are `2XX`, `4XX`, or `5XX`.
        """
        return pulumi.get(self, "status_code")
@pulumi.output_type
class ListenerRuleActionForward(dict):
    """Output type for a `forward` action on a listener rule."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters.
        suggest = {"targetGroups": "target_groups"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleActionForward. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleActionForward.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleActionForward.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 target_groups: Sequence['outputs.ListenerRuleActionForwardTargetGroup'],
                 stickiness: Optional['outputs.ListenerRuleActionForwardStickiness'] = None):
        """
        :param Sequence['ListenerRuleActionForwardTargetGroupArgs'] target_groups: One or more target groups block.
        :param 'ListenerRuleActionForwardStickinessArgs' stickiness: The target group stickiness for the rule.
        """
        pulumi.set(__self__, "target_groups", target_groups)
        if stickiness is not None:
            # Optional; stored only when explicitly supplied.
            pulumi.set(__self__, "stickiness", stickiness)

    @property
    @pulumi.getter(name="targetGroups")
    def target_groups(self) -> Sequence['outputs.ListenerRuleActionForwardTargetGroup']:
        """
        One or more target groups block.
        """
        return pulumi.get(self, "target_groups")

    @property
    @pulumi.getter
    def stickiness(self) -> Optional['outputs.ListenerRuleActionForwardStickiness']:
        """
        Target group stickiness configuration for the rule.
        """
        return pulumi.get(self, "stickiness")
@pulumi.output_type
class ListenerRuleActionForwardStickiness(dict):
    """Output type for the stickiness settings of a `forward` action."""

    def __init__(__self__, *,
                 duration: int,
                 enabled: Optional[bool] = None):
        """
        :param int duration: Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
        :param bool enabled: Whether target group stickiness is enabled.
        """
        pulumi.set(__self__, "duration", duration)
        if enabled is not None:
            # Optional; stored only when explicitly supplied.
            pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter
    def duration(self) -> int:
        """
        Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
        """
        return pulumi.get(self, "duration")

    @property
    @pulumi.getter
    def enabled(self) -> Optional[bool]:
        """
        Whether target group stickiness is enabled.
        """
        return pulumi.get(self, "enabled")
@pulumi.output_type
class ListenerRuleActionForwardTargetGroup(dict):
    """Output type for a single target group entry of a `forward` action."""

    def __init__(__self__, *,
                 arn: str,
                 weight: Optional[int] = None):
        """
        :param str arn: Amazon Resource Name (ARN) of the target group.
        :param int weight: The weight. The range is 0 to 999.
        """
        pulumi.set(__self__, "arn", arn)
        if weight is not None:
            # Optional; stored only when explicitly supplied.
            pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        Amazon Resource Name (ARN) of the target group.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter
    def weight(self) -> Optional[int]:
        """
        The routing weight. The range is 0 to 999.
        """
        return pulumi.get(self, "weight")
@pulumi.output_type
class ListenerRuleActionRedirect(dict):
    """Output type for a `redirect` action on a listener rule."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters.
        suggest = {"statusCode": "status_code"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleActionRedirect. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleActionRedirect.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleActionRedirect.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 status_code: str,
                 host: Optional[str] = None,
                 path: Optional[str] = None,
                 port: Optional[str] = None,
                 protocol: Optional[str] = None,
                 query: Optional[str] = None):
        """
        :param str status_code: The HTTP redirect code. The redirect is either permanent (`HTTP_301`) or temporary (`HTTP_302`).
        :param str host: The hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
        :param str path: The absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
        :param str port: The port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
        :param str protocol: The protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
        :param str query: The query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
        """
        pulumi.set(__self__, "status_code", status_code)
        # Optional fields are stored only when explicitly supplied.
        for attr, value in (("host", host),
                            ("path", path),
                            ("port", port),
                            ("protocol", protocol),
                            ("query", query)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> str:
        """
        The HTTP redirect code, either permanent (`HTTP_301`) or temporary (`HTTP_302`).
        """
        return pulumi.get(self, "status_code")

    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The hostname. This component is not percent-encoded and can contain `#{host}`. Defaults to `#{host}`.
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        The absolute path, starting with the leading "/". Not percent-encoded; can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def port(self) -> Optional[str]:
        """
        The port: a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """
        The protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
        """
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def query(self) -> Optional[str]:
        """
        The query parameters, URL-encoded when necessary, but not percent-encoded; without the leading "?". Defaults to `#{query}`.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class ListenerRuleCondition(dict):
    """Output type for a listener rule condition block."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters.
        suggest = {
            "hostHeader": "host_header",
            "httpHeader": "http_header",
            "httpRequestMethod": "http_request_method",
            "pathPattern": "path_pattern",
            "queryStrings": "query_strings",
            "sourceIp": "source_ip",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleCondition. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleCondition.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleCondition.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 host_header: Optional['outputs.ListenerRuleConditionHostHeader'] = None,
                 http_header: Optional['outputs.ListenerRuleConditionHttpHeader'] = None,
                 http_request_method: Optional['outputs.ListenerRuleConditionHttpRequestMethod'] = None,
                 path_pattern: Optional['outputs.ListenerRuleConditionPathPattern'] = None,
                 query_strings: Optional[Sequence['outputs.ListenerRuleConditionQueryString']] = None,
                 source_ip: Optional['outputs.ListenerRuleConditionSourceIp'] = None):
        """
        :param 'ListenerRuleConditionHostHeaderArgs' host_header: Contains a single `values` item which is a list of host header patterns to match. The maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied.
        :param 'ListenerRuleConditionHttpHeaderArgs' http_header: HTTP headers to match. HTTP Header block fields documented below.
        :param 'ListenerRuleConditionHttpRequestMethodArgs' http_request_method: Contains a single `values` item which is a list of HTTP request methods or verbs to match. Maximum size is 40 characters. Only allowed characters are A-Z, hyphen (-) and underscore (\_). Comparison is case sensitive. Wildcards are not supported. Only one needs to match for the condition to be satisfied. AWS recommends that GET and HEAD requests are routed in the same way because the response to a HEAD request may be cached.
        :param 'ListenerRuleConditionPathPatternArgs' path_pattern: Contains a single `values` item which is a list of path patterns to match against the request URL. Maximum size of each pattern is 128 characters. Comparison is case sensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied. Path pattern is compared only to the path of the URL, not to its query string. To compare against the query string, use a `query_string` condition.
        :param Sequence['ListenerRuleConditionQueryStringArgs'] query_strings: Query strings to match. Query String block fields documented below.
        :param 'ListenerRuleConditionSourceIpArgs' source_ip: Contains a single `values` item which is a list of source IP CIDR notations to match. You can use both IPv4 and IPv6 addresses. Wildcards are not supported. Condition is satisfied if the source IP address of the request matches one of the CIDR blocks. Condition is not satisfied by the addresses in the `X-Forwarded-For` header, use `http_header` condition instead.
        """
        # Every condition type is optional; store only those supplied.
        for attr, value in (("host_header", host_header),
                            ("http_header", http_header),
                            ("http_request_method", http_request_method),
                            ("path_pattern", path_pattern),
                            ("query_strings", query_strings),
                            ("source_ip", source_ip)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="hostHeader")
    def host_header(self) -> Optional['outputs.ListenerRuleConditionHostHeader']:
        """
        Contains a single `values` item which is a list of host header patterns to match. The maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied.
        """
        return pulumi.get(self, "host_header")

    @property
    @pulumi.getter(name="httpHeader")
    def http_header(self) -> Optional['outputs.ListenerRuleConditionHttpHeader']:
        """
        HTTP headers to match. HTTP Header block fields documented below.
        """
        return pulumi.get(self, "http_header")

    @property
    @pulumi.getter(name="httpRequestMethod")
    def http_request_method(self) -> Optional['outputs.ListenerRuleConditionHttpRequestMethod']:
        """
        Contains a single `values` item which is a list of HTTP request methods or verbs to match. Maximum size is 40 characters. Only allowed characters are A-Z, hyphen (-) and underscore (\_). Comparison is case sensitive. Wildcards are not supported. Only one needs to match for the condition to be satisfied. AWS recommends that GET and HEAD requests are routed in the same way because the response to a HEAD request may be cached.
        """
        return pulumi.get(self, "http_request_method")

    @property
    @pulumi.getter(name="pathPattern")
    def path_pattern(self) -> Optional['outputs.ListenerRuleConditionPathPattern']:
        """
        Contains a single `values` item which is a list of path patterns to match against the request URL. Maximum size of each pattern is 128 characters. Comparison is case sensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied. Path pattern is compared only to the path of the URL, not to its query string. To compare against the query string, use a `query_string` condition.
        """
        return pulumi.get(self, "path_pattern")

    @property
    @pulumi.getter(name="queryStrings")
    def query_strings(self) -> Optional[Sequence['outputs.ListenerRuleConditionQueryString']]:
        """
        Query strings to match. Query String block fields documented below.
        """
        return pulumi.get(self, "query_strings")

    @property
    @pulumi.getter(name="sourceIp")
    def source_ip(self) -> Optional['outputs.ListenerRuleConditionSourceIp']:
        """
        Contains a single `values` item which is a list of source IP CIDR notations to match. You can use both IPv4 and IPv6 addresses. Wildcards are not supported. Condition is satisfied if the source IP address of the request matches one of the CIDR blocks. Condition is not satisfied by the addresses in the `X-Forwarded-For` header, use `http_header` condition instead.
        """
        return pulumi.get(self, "source_ip")
@pulumi.output_type
class ListenerRuleConditionHostHeader(dict):
    """Output type for the `host_header` condition of a listener rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Header value patterns to match (max 128 characters each, case insensitive; wildcards `*` and `?` supported). Only one pattern needs to match; use one condition block per string to require all to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class ListenerRuleConditionHttpHeader(dict):
    """Output type for the `http_header` condition of a listener rule."""

    @staticmethod
    def __key_warning(key: str):
        # camelCase wire keys mapped to the snake_case property getters.
        suggest = {"httpHeaderName": "http_header_name"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerRuleConditionHttpHeader. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerRuleConditionHttpHeader.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerRuleConditionHttpHeader.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 http_header_name: str,
                 values: Sequence[str]):
        """
        :param str http_header_name: Name of HTTP header to search. The maximum size is 40 characters. Comparison is case insensitive. Only RFC7240 characters are supported. Wildcards are not supported. You cannot use HTTP header condition to specify the host header, use a `host-header` condition instead.
        :param Sequence[str] values: List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
        """
        pulumi.set(__self__, "http_header_name", http_header_name)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter(name="httpHeaderName")
    def http_header_name(self) -> str:
        """
        Name of the HTTP header to search (max 40 characters, case insensitive, RFC7240 characters only, no wildcards). For the host header, use a `host-header` condition instead.
        """
        return pulumi.get(self, "http_header_name")

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Header value patterns to match (max 128 characters each, case insensitive; wildcards `*` and `?` supported). Only one pattern needs to match; use one condition block per string to require all to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class ListenerRuleConditionHttpRequestMethod(dict):
    """Output type for the `http_request_method` condition of a listener rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Patterns to match (max 128 characters each, case insensitive; wildcards `*` and `?` supported). Only one pattern needs to match; use one condition block per string to require all to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class ListenerRuleConditionPathPattern(dict):
    """Output type for the `path_pattern` condition of a listener rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Patterns to match (max 128 characters each, case insensitive; wildcards `*` and `?` supported). Only one pattern needs to match; use one condition block per string to require all to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class ListenerRuleConditionQueryString(dict):
    """Output type for one `query_string` entry of a listener rule condition."""

    def __init__(__self__, *,
                 value: str,
                 key: Optional[str] = None):
        """
        :param str value: Query string value pattern to match.
        :param str key: Query string key pattern to match.
        """
        pulumi.set(__self__, "value", value)
        if key is not None:
            # Optional; stored only when explicitly supplied.
            pulumi.set(__self__, "key", key)

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        Value pattern of the query string to match.
        """
        return pulumi.get(self, "value")

    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """
        Key pattern of the query string to match.
        """
        return pulumi.get(self, "key")
@pulumi.output_type
class ListenerRuleConditionSourceIp(dict):
    """Output type for the `source_ip` condition of a listener rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        Patterns to match (max 128 characters each, case insensitive; wildcards `*` and `?` supported). Only one pattern needs to match; use one condition block per string to require all to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class LoadBalancerAccessLogs(dict):
    """Output type for a load balancer `access_logs` block."""

    def __init__(__self__, *,
                 bucket: str,
                 enabled: Optional[bool] = None,
                 prefix: Optional[str] = None):
        """
        :param str bucket: The S3 bucket name to store the logs in.
        :param bool enabled: Boolean to enable / disable `access_logs`. Defaults to `false`, even when `bucket` is specified.
        :param str prefix: The S3 bucket prefix. Logs are stored in the root if not configured.
        """
        pulumi.set(__self__, "bucket", bucket)
        # Optional fields are stored only when explicitly supplied.
        for attr, value in (("enabled", enabled), ("prefix", prefix)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Name of the S3 bucket the logs are stored in.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def enabled(self) -> Optional[bool]:
        """
        Whether `access_logs` are enabled. Defaults to `false`, even when `bucket` is specified.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def prefix(self) -> Optional[str]:
        """
        S3 bucket prefix. Logs are stored in the bucket root if not configured.
        """
        return pulumi.get(self, "prefix")
@pulumi.output_type
class LoadBalancerSubnetMapping(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to the snake_case property getters.
        suggestions = {
            "subnetId": "subnet_id",
            "allocationId": "allocation_id",
            "ipv6Address": "ipv6_address",
            "outpostId": "outpost_id",
            "privateIpv4Address": "private_ipv4_address",
        }
        suggest = suggestions.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in LoadBalancerSubnetMapping. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        LoadBalancerSubnetMapping.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        LoadBalancerSubnetMapping.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 subnet_id: str,
                 allocation_id: Optional[str] = None,
                 ipv6_address: Optional[str] = None,
                 outpost_id: Optional[str] = None,
                 private_ipv4_address: Optional[str] = None):
        """
        :param str subnet_id: The id of the subnet of which to attach to the load balancer. You can specify only one subnet per Availability Zone.
        :param str allocation_id: The allocation ID of the Elastic IP address.
        :param str ipv6_address: An ipv6 address within the subnet to assign to the internet-facing load balancer.
        :param str private_ipv4_address: A private ipv4 address within the subnet to assign to the internal-facing load balancer.
        """
        pulumi.set(__self__, "subnet_id", subnet_id)
        # Optional keys are stored only when the caller supplied a value.
        optional_fields = (
            ("allocation_id", allocation_id),
            ("ipv6_address", ipv6_address),
            ("outpost_id", outpost_id),
            ("private_ipv4_address", private_ipv4_address),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> str:
        """
        The id of the subnet of which to attach to the load balancer. You can specify only one subnet per Availability Zone.
        """
        return pulumi.get(self, "subnet_id")

    @property
    @pulumi.getter(name="allocationId")
    def allocation_id(self) -> Optional[str]:
        """
        The allocation ID of the Elastic IP address.
        """
        return pulumi.get(self, "allocation_id")

    @property
    @pulumi.getter(name="ipv6Address")
    def ipv6_address(self) -> Optional[str]:
        """
        An ipv6 address within the subnet to assign to the internet-facing load balancer.
        """
        return pulumi.get(self, "ipv6_address")

    @property
    @pulumi.getter(name="outpostId")
    def outpost_id(self) -> Optional[str]:
        return pulumi.get(self, "outpost_id")

    @property
    @pulumi.getter(name="privateIpv4Address")
    def private_ipv4_address(self) -> Optional[str]:
        """
        A private ipv4 address within the subnet to assign to the internal-facing load balancer.
        """
        return pulumi.get(self, "private_ipv4_address")
@pulumi.output_type
class TargetGroupHealthCheck(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to the snake_case property getters.
        suggestions = {
            "healthyThreshold": "healthy_threshold",
            "unhealthyThreshold": "unhealthy_threshold",
        }
        suggest = suggestions.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in TargetGroupHealthCheck. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        TargetGroupHealthCheck.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        TargetGroupHealthCheck.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 enabled: Optional[bool] = None,
                 healthy_threshold: Optional[int] = None,
                 interval: Optional[int] = None,
                 matcher: Optional[str] = None,
                 path: Optional[str] = None,
                 port: Optional[str] = None,
                 protocol: Optional[str] = None,
                 timeout: Optional[int] = None,
                 unhealthy_threshold: Optional[int] = None):
        """
        :param bool enabled: Whether health checks are enabled. Defaults to `true`.
        :param int healthy_threshold: Number of consecutive health checks successes required before considering an unhealthy target healthy. Defaults to 3.
        :param int interval: Approximate amount of time, in seconds, between health checks of an individual target. Minimum value 5 seconds, Maximum value 300 seconds. For `lambda` target groups, it needs to be greater as the `timeout` of the underlying `lambda`. Default 30 seconds.
        :param str matcher: Response codes to use when checking for a healthy responses from a target. You can specify multiple values (for example, "200,202" for HTTP(s) or "0,12" for GRPC) or a range of values (for example, "200-299" or "0-99"). Required for HTTP/HTTPS/GRPC ALB. Only applies to Application Load Balancers (i.e., HTTP/HTTPS/GRPC) not Network Load Balancers (i.e., TCP).
        :param str path: Destination for the health check request. Required for HTTP/HTTPS ALB and HTTP NLB. Only applies to HTTP/HTTPS.
        :param str port: Port to use to connect with the target. Valid values are either ports 1-65535, or `traffic-port`. Defaults to `traffic-port`.
        :param str protocol: Protocol to use to connect with the target. Defaults to `HTTP`. Not applicable when `target_type` is `lambda`.
        :param int timeout: Amount of time, in seconds, during which no response means a failed health check. For Application Load Balancers, the range is 2 to 120 seconds, and the default is 5 seconds for the `instance` target type and 30 seconds for the `lambda` target type. For Network Load Balancers, you cannot set a custom value, and the default is 10 seconds for TCP and HTTPS health checks and 6 seconds for HTTP health checks.
        :param int unhealthy_threshold: Number of consecutive health check failures required before considering the target unhealthy. For Network Load Balancers, this value must be the same as the `healthy_threshold`. Defaults to 3.
        """
        # Every field is optional; only persist the keys the caller provided.
        optional_fields = (
            ("enabled", enabled),
            ("healthy_threshold", healthy_threshold),
            ("interval", interval),
            ("matcher", matcher),
            ("path", path),
            ("port", port),
            ("protocol", protocol),
            ("timeout", timeout),
            ("unhealthy_threshold", unhealthy_threshold),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[bool]:
        """
        Whether health checks are enabled. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[int]:
        """
        Number of consecutive health checks successes required before considering an unhealthy target healthy. Defaults to 3.
        """
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter
    def interval(self) -> Optional[int]:
        """
        Approximate amount of time, in seconds, between health checks of an individual target. Minimum value 5 seconds, Maximum value 300 seconds. For `lambda` target groups, it needs to be greater as the `timeout` of the underlying `lambda`. Default 30 seconds.
        """
        return pulumi.get(self, "interval")

    @property
    @pulumi.getter
    def matcher(self) -> Optional[str]:
        """
        Response codes to use when checking for a healthy responses from a target. You can specify multiple values (for example, "200,202" for HTTP(s) or "0,12" for GRPC) or a range of values (for example, "200-299" or "0-99"). Required for HTTP/HTTPS/GRPC ALB. Only applies to Application Load Balancers (i.e., HTTP/HTTPS/GRPC) not Network Load Balancers (i.e., TCP).
        """
        return pulumi.get(self, "matcher")

    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        Destination for the health check request. Required for HTTP/HTTPS ALB and HTTP NLB. Only applies to HTTP/HTTPS.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def port(self) -> Optional[str]:
        """
        Port to use to connect with the target. Valid values are either ports 1-65535, or `traffic-port`. Defaults to `traffic-port`.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """
        Protocol to use to connect with the target. Defaults to `HTTP`. Not applicable when `target_type` is `lambda`.
        """
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def timeout(self) -> Optional[int]:
        """
        Amount of time, in seconds, during which no response means a failed health check. For Application Load Balancers, the range is 2 to 120 seconds, and the default is 5 seconds for the `instance` target type and 30 seconds for the `lambda` target type. For Network Load Balancers, you cannot set a custom value, and the default is 10 seconds for TCP and HTTPS health checks and 6 seconds for HTTP health checks.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> Optional[int]:
        """
        Number of consecutive health check failures required before considering the target unhealthy. For Network Load Balancers, this value must be the same as the `healthy_threshold`. Defaults to 3.
        """
        return pulumi.get(self, "unhealthy_threshold")
@pulumi.output_type
class TargetGroupStickiness(dict):
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to the snake_case property getter.
        suggest = {"cookieDuration": "cookie_duration"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in TargetGroupStickiness. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        TargetGroupStickiness.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        TargetGroupStickiness.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 type: str,
                 cookie_duration: Optional[int] = None,
                 enabled: Optional[bool] = None):
        """
        :param str type: Type of sticky sessions. The only current possible values are `lb_cookie` for ALBs and `source_ip` for NLBs.
        :param int cookie_duration: Only used when the type is `lb_cookie`. The time period, in seconds, during which requests from a client should be routed to the same target. After this time period expires, the load balancer-generated cookie is considered stale. The range is 1 second to 1 week (604800 seconds). The default value is 1 day (86400 seconds).
        :param bool enabled: Whether to enable `stickiness`. Default is `true`.
        """
        pulumi.set(__self__, "type", type)
        # Optional keys are stored only when the caller supplied a value.
        for key, value in (("cookie_duration", cookie_duration), ("enabled", enabled)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Type of sticky sessions. The only current possible values are `lb_cookie` for ALBs and `source_ip` for NLBs.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="cookieDuration")
    def cookie_duration(self) -> Optional[int]:
        """
        Only used when the type is `lb_cookie`. The time period, in seconds, during which requests from a client should be routed to the same target. After this time period expires, the load balancer-generated cookie is considered stale. The range is 1 second to 1 week (604800 seconds). The default value is 1 day (86400 seconds).
        """
        return pulumi.get(self, "cookie_duration")

    @property
    @pulumi.getter
    def enabled(self) -> Optional[bool]:
        """
        Whether to enable `stickiness`. Default is `true`.
        """
        return pulumi.get(self, "enabled")
@pulumi.output_type
class GetListenerDefaultActionResult(dict):
    def __init__(__self__, *,
                 authenticate_cognitos: Sequence['outputs.GetListenerDefaultActionAuthenticateCognitoResult'],
                 authenticate_oidcs: Sequence['outputs.GetListenerDefaultActionAuthenticateOidcResult'],
                 fixed_responses: Sequence['outputs.GetListenerDefaultActionFixedResponseResult'],
                 forwards: Sequence['outputs.GetListenerDefaultActionForwardResult'],
                 order: int,
                 redirects: Sequence['outputs.GetListenerDefaultActionRedirectResult'],
                 target_group_arn: str,
                 type: str):
        # All fields of this data-source result are required; store each one.
        fields = (
            ("authenticate_cognitos", authenticate_cognitos),
            ("authenticate_oidcs", authenticate_oidcs),
            ("fixed_responses", fixed_responses),
            ("forwards", forwards),
            ("order", order),
            ("redirects", redirects),
            ("target_group_arn", target_group_arn),
            ("type", type),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="authenticateCognitos")
    def authenticate_cognitos(self) -> Sequence['outputs.GetListenerDefaultActionAuthenticateCognitoResult']:
        return pulumi.get(self, "authenticate_cognitos")

    @property
    @pulumi.getter(name="authenticateOidcs")
    def authenticate_oidcs(self) -> Sequence['outputs.GetListenerDefaultActionAuthenticateOidcResult']:
        return pulumi.get(self, "authenticate_oidcs")

    @property
    @pulumi.getter(name="fixedResponses")
    def fixed_responses(self) -> Sequence['outputs.GetListenerDefaultActionFixedResponseResult']:
        return pulumi.get(self, "fixed_responses")

    @property
    @pulumi.getter
    def forwards(self) -> Sequence['outputs.GetListenerDefaultActionForwardResult']:
        return pulumi.get(self, "forwards")

    @property
    @pulumi.getter
    def order(self) -> int:
        return pulumi.get(self, "order")

    @property
    @pulumi.getter
    def redirects(self) -> Sequence['outputs.GetListenerDefaultActionRedirectResult']:
        return pulumi.get(self, "redirects")

    @property
    @pulumi.getter(name="targetGroupArn")
    def target_group_arn(self) -> str:
        return pulumi.get(self, "target_group_arn")

    @property
    @pulumi.getter
    def type(self) -> str:
        return pulumi.get(self, "type")
@pulumi.output_type
class GetListenerDefaultActionAuthenticateCognitoResult(dict):
    def __init__(__self__, *,
                 authentication_request_extra_params: Mapping[str, str],
                 on_unauthenticated_request: str,
                 scope: str,
                 session_cookie_name: str,
                 session_timeout: int,
                 user_pool_arn: str,
                 user_pool_client_id: str,
                 user_pool_domain: str):
        # All fields of this data-source result are required; store each one.
        fields = (
            ("authentication_request_extra_params", authentication_request_extra_params),
            ("on_unauthenticated_request", on_unauthenticated_request),
            ("scope", scope),
            ("session_cookie_name", session_cookie_name),
            ("session_timeout", session_timeout),
            ("user_pool_arn", user_pool_arn),
            ("user_pool_client_id", user_pool_client_id),
            ("user_pool_domain", user_pool_domain),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Mapping[str, str]:
        return pulumi.get(self, "authentication_request_extra_params")

    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> str:
        return pulumi.get(self, "on_unauthenticated_request")

    @property
    @pulumi.getter
    def scope(self) -> str:
        return pulumi.get(self, "scope")

    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> str:
        return pulumi.get(self, "session_cookie_name")

    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> int:
        return pulumi.get(self, "session_timeout")

    @property
    @pulumi.getter(name="userPoolArn")
    def user_pool_arn(self) -> str:
        return pulumi.get(self, "user_pool_arn")

    @property
    @pulumi.getter(name="userPoolClientId")
    def user_pool_client_id(self) -> str:
        return pulumi.get(self, "user_pool_client_id")

    @property
    @pulumi.getter(name="userPoolDomain")
    def user_pool_domain(self) -> str:
        return pulumi.get(self, "user_pool_domain")
@pulumi.output_type
class GetListenerDefaultActionAuthenticateOidcResult(dict):
    def __init__(__self__, *,
                 authentication_request_extra_params: Mapping[str, str],
                 authorization_endpoint: str,
                 client_id: str,
                 client_secret: str,
                 issuer: str,
                 on_unauthenticated_request: str,
                 scope: str,
                 session_cookie_name: str,
                 session_timeout: int,
                 token_endpoint: str,
                 user_info_endpoint: str):
        # All fields of this data-source result are required; store each one.
        fields = (
            ("authentication_request_extra_params", authentication_request_extra_params),
            ("authorization_endpoint", authorization_endpoint),
            ("client_id", client_id),
            ("client_secret", client_secret),
            ("issuer", issuer),
            ("on_unauthenticated_request", on_unauthenticated_request),
            ("scope", scope),
            ("session_cookie_name", session_cookie_name),
            ("session_timeout", session_timeout),
            ("token_endpoint", token_endpoint),
            ("user_info_endpoint", user_info_endpoint),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="authenticationRequestExtraParams")
    def authentication_request_extra_params(self) -> Mapping[str, str]:
        return pulumi.get(self, "authentication_request_extra_params")

    @property
    @pulumi.getter(name="authorizationEndpoint")
    def authorization_endpoint(self) -> str:
        return pulumi.get(self, "authorization_endpoint")

    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> str:
        return pulumi.get(self, "client_id")

    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> str:
        return pulumi.get(self, "client_secret")

    @property
    @pulumi.getter
    def issuer(self) -> str:
        return pulumi.get(self, "issuer")

    @property
    @pulumi.getter(name="onUnauthenticatedRequest")
    def on_unauthenticated_request(self) -> str:
        return pulumi.get(self, "on_unauthenticated_request")

    @property
    @pulumi.getter
    def scope(self) -> str:
        return pulumi.get(self, "scope")

    @property
    @pulumi.getter(name="sessionCookieName")
    def session_cookie_name(self) -> str:
        return pulumi.get(self, "session_cookie_name")

    @property
    @pulumi.getter(name="sessionTimeout")
    def session_timeout(self) -> int:
        return pulumi.get(self, "session_timeout")

    @property
    @pulumi.getter(name="tokenEndpoint")
    def token_endpoint(self) -> str:
        return pulumi.get(self, "token_endpoint")

    @property
    @pulumi.getter(name="userInfoEndpoint")
    def user_info_endpoint(self) -> str:
        return pulumi.get(self, "user_info_endpoint")
@pulumi.output_type
class GetListenerDefaultActionFixedResponseResult(dict):
    def __init__(__self__, *,
                 content_type: str,
                 message_body: str,
                 status_code: str):
        # All fields of this data-source result are required; store each one.
        for key, value in (("content_type", content_type),
                           ("message_body", message_body),
                           ("status_code", status_code)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> str:
        return pulumi.get(self, "content_type")

    @property
    @pulumi.getter(name="messageBody")
    def message_body(self) -> str:
        return pulumi.get(self, "message_body")

    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> str:
        return pulumi.get(self, "status_code")
@pulumi.output_type
class GetListenerDefaultActionForwardResult(dict):
    def __init__(__self__, *,
                 stickinesses: Sequence['outputs.GetListenerDefaultActionForwardStickinessResult'],
                 target_groups: Sequence['outputs.GetListenerDefaultActionForwardTargetGroupResult']):
        # Both fields of this data-source result are required; store each one.
        for key, value in (("stickinesses", stickinesses), ("target_groups", target_groups)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def stickinesses(self) -> Sequence['outputs.GetListenerDefaultActionForwardStickinessResult']:
        return pulumi.get(self, "stickinesses")

    @property
    @pulumi.getter(name="targetGroups")
    def target_groups(self) -> Sequence['outputs.GetListenerDefaultActionForwardTargetGroupResult']:
        return pulumi.get(self, "target_groups")
@pulumi.output_type
class GetListenerDefaultActionForwardStickinessResult(dict):
    def __init__(__self__, *,
                 duration: int,
                 enabled: bool):
        # Both fields of this data-source result are required; store each one.
        for key, value in (("duration", duration), ("enabled", enabled)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def duration(self) -> int:
        return pulumi.get(self, "duration")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")
@pulumi.output_type
class GetListenerDefaultActionForwardTargetGroupResult(dict):
    def __init__(__self__, *,
                 arn: str,
                 weight: int):
        """
        :param str arn: ARN of the listener. Required if `load_balancer_arn` and `port` is not set.
        """
        # Both fields of this data-source result are required; store each one.
        for key, value in (("arn", arn), ("weight", weight)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        ARN of the listener. Required if `load_balancer_arn` and `port` is not set.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter
    def weight(self) -> int:
        return pulumi.get(self, "weight")
@pulumi.output_type
class GetListenerDefaultActionRedirectResult(dict):
    def __init__(__self__, *,
                 host: str,
                 path: str,
                 port: str,
                 protocol: str,
                 query: str,
                 status_code: str):
        """
        :param str port: Port of the listener. Required if `arn` is not set.
        """
        # All fields of this data-source result are required; store each one.
        fields = (
            ("host", host),
            ("path", path),
            ("port", port),
            ("protocol", protocol),
            ("query", query),
            ("status_code", status_code),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def host(self) -> str:
        return pulumi.get(self, "host")

    @property
    @pulumi.getter
    def path(self) -> str:
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def port(self) -> str:
        """
        Port of the listener. Required if `arn` is not set.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> str:
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def query(self) -> str:
        return pulumi.get(self, "query")

    @property
    @pulumi.getter(name="statusCode")
    def status_code(self) -> str:
        return pulumi.get(self, "status_code")
@pulumi.output_type
class GetLoadBalancerAccessLogsResult(dict):
    def __init__(__self__, *,
                 bucket: str,
                 enabled: bool,
                 prefix: str):
        # All fields of this data-source result are required; store each one.
        for key, value in (("bucket", bucket), ("enabled", enabled), ("prefix", prefix)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def bucket(self) -> str:
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def prefix(self) -> str:
        return pulumi.get(self, "prefix")
@pulumi.output_type
class GetLoadBalancerSubnetMappingResult(dict):
    def __init__(__self__, *,
                 allocation_id: str,
                 ipv6_address: str,
                 outpost_id: str,
                 private_ipv4_address: str,
                 subnet_id: str):
        # All fields of this data-source result are required; store each one.
        fields = (
            ("allocation_id", allocation_id),
            ("ipv6_address", ipv6_address),
            ("outpost_id", outpost_id),
            ("private_ipv4_address", private_ipv4_address),
            ("subnet_id", subnet_id),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="allocationId")
    def allocation_id(self) -> str:
        return pulumi.get(self, "allocation_id")

    @property
    @pulumi.getter(name="ipv6Address")
    def ipv6_address(self) -> str:
        return pulumi.get(self, "ipv6_address")

    @property
    @pulumi.getter(name="outpostId")
    def outpost_id(self) -> str:
        return pulumi.get(self, "outpost_id")

    @property
    @pulumi.getter(name="privateIpv4Address")
    def private_ipv4_address(self) -> str:
        return pulumi.get(self, "private_ipv4_address")

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> str:
        return pulumi.get(self, "subnet_id")
@pulumi.output_type
class GetTargetGroupHealthCheckResult(dict):
    def __init__(__self__, *,
                 enabled: bool,
                 healthy_threshold: int,
                 interval: int,
                 matcher: str,
                 path: str,
                 port: str,
                 protocol: str,
                 timeout: int,
                 unhealthy_threshold: int):
        # All fields of this data-source result are required; store each one.
        fields = (
            ("enabled", enabled),
            ("healthy_threshold", healthy_threshold),
            ("interval", interval),
            ("matcher", matcher),
            ("path", path),
            ("port", port),
            ("protocol", protocol),
            ("timeout", timeout),
            ("unhealthy_threshold", unhealthy_threshold),
        )
        for key, value in fields:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> int:
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter
    def interval(self) -> int:
        return pulumi.get(self, "interval")

    @property
    @pulumi.getter
    def matcher(self) -> str:
        return pulumi.get(self, "matcher")

    @property
    @pulumi.getter
    def path(self) -> str:
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def port(self) -> str:
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> str:
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def timeout(self) -> int:
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> int:
        return pulumi.get(self, "unhealthy_threshold")
@pulumi.output_type
class GetTargetGroupStickinessResult(dict):
    def __init__(__self__, *,
                 cookie_duration: int,
                 enabled: bool,
                 type: str):
        # All fields of this data-source result are required; store each one.
        for key, value in (("cookie_duration", cookie_duration), ("enabled", enabled), ("type", type)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="cookieDuration")
    def cookie_duration(self) -> int:
        return pulumi.get(self, "cookie_duration")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def type(self) -> str:
        return pulumi.get(self, "type")
| 42.744205
| 560
| 0.657079
| 12,194
| 106,946
| 5.567082
| 0.042808
| 0.020314
| 0.03447
| 0.050379
| 0.850394
| 0.838713
| 0.830404
| 0.795154
| 0.786433
| 0.770524
| 0
| 0.005028
| 0.246816
| 106,946
| 2,501
| 561
| 42.761295
| 0.837741
| 0.308249
| 0
| 0.80964
| 1
| 0.010372
| 0.192696
| 0.085387
| 0
| 0
| 0
| 0
| 0
| 1
| 0.164735
| false
| 0
| 0.003661
| 0.037828
| 0.322758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40b2d4e39ba52424791c6f5a189db1d84b17ed0a
| 1,483
|
py
|
Python
|
tools.py
|
instance01/py-S-MCTS
|
cad67da8bb515126311040674d5e6da77f47c90f
|
[
"MIT"
] | null | null | null |
tools.py
|
instance01/py-S-MCTS
|
cad67da8bb515126311040674d5e6da77f47c90f
|
[
"MIT"
] | null | null | null |
tools.py
|
instance01/py-S-MCTS
|
cad67da8bb515126311040674d5e6da77f47c90f
|
[
"MIT"
] | null | null | null |
def print_tree(mcts_obj, root_node):
fifo = []
for child_node in root_node.children:
q = 0
if mcts_obj.visits[child_node] > 0:
q = mcts_obj.Q[child_node] / mcts_obj.visits[child_node]
print(
child_node,
mcts_obj.Q[child_node],
mcts_obj.visits[child_node],
q
)
fifo.append(child_node)
print("-")
while fifo:
root_node = fifo.pop()
print("(", root_node, ")")
for child_node in root_node.children:
q = 0
if mcts_obj.visits[child_node] > 0:
q = mcts_obj.Q[child_node] / mcts_obj.visits[child_node]
print(
child_node,
mcts_obj.Q[child_node],
mcts_obj.visits[child_node],
q
)
fifo.append(child_node)
if root_node.children:
print(" ")
def gen_tree_graph(root_node, G):
    """Add every node reachable from *root_node* (via `.children`) to graph *G*.

    Nodes are identified by `str(node)`; an edge is added from each parent to
    each of its children. *G* only needs `add_node` and `add_edge` methods
    (e.g. a networkx graph).

    The original implementation duplicated one whole traversal step before the
    `while` loop; folding the root into the work list produces the exact same
    sequence of `add_node`/`add_edge` calls with half the code.
    """
    G.add_node(str(root_node))
    stack = [root_node]  # popped from the end, as in the original traversal
    while stack:
        node = stack.pop()
        node_id = str(node)
        for child_node in node.children:
            child_id = str(child_node)
            G.add_node(child_id)
            G.add_edge(node_id, child_id)
            stack.append(child_node)
| 29.66
| 72
| 0.544842
| 198
| 1,483
| 3.757576
| 0.131313
| 0.266129
| 0.104839
| 0.145161
| 0.857527
| 0.822581
| 0.758065
| 0.758065
| 0.758065
| 0.736559
| 0
| 0.004202
| 0.358058
| 1,483
| 49
| 73
| 30.265306
| 0.777311
| 0
| 0
| 0.765957
| 0
| 0
| 0.002697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0
| 0
| 0.042553
| 0.12766
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40f29c05600603542883e0851c97658351c91049
| 2,830
|
py
|
Python
|
tests/tilemap_tests/test_file_formats.py
|
yegarti/arcade
|
1862e61aab9a7dc646265005b0e808d953a9dfe3
|
[
"MIT"
] | 824
|
2016-01-07T19:27:57.000Z
|
2020-08-01T03:15:47.000Z
|
tests/tilemap_tests/test_file_formats.py
|
yegarti/arcade
|
1862e61aab9a7dc646265005b0e808d953a9dfe3
|
[
"MIT"
] | 646
|
2016-01-08T02:42:31.000Z
|
2020-08-03T14:13:27.000Z
|
tests/tilemap_tests/test_file_formats.py
|
yegarti/arcade
|
1862e61aab9a7dc646265005b0e808d953a9dfe3
|
[
"MIT"
] | 221
|
2016-01-07T22:36:33.000Z
|
2020-07-24T23:30:08.000Z
|
import arcade
TILE_SCALING = 1.0
def test_csv_left_up():
    """Map with CSV layer data, left-up render order, embedded tileset."""
    # Read in the tiled map
    my_map = arcade.load_tilemap("../tiled_maps/csv_left_up_embedded.json")
    assert my_map.tile_width == 128
    assert my_map.tile_height == 128
    assert my_map.width == 10
    assert my_map.height == 10

    # --- Platforms ---
    assert "Blocking Sprites" in my_map.sprite_lists
    wall_list = my_map.sprite_lists["Blocking Sprites"]
    # (position, texture-name fragment) expected for the first three sprites.
    expected_walls = [
        ((64, 1216), "dirtCenter"),
        ((1216, 1216), "grassCenter"),
        ((64, 64), "boxCrate"),
    ]
    for index, (position, texture_fragment) in enumerate(expected_walls):
        assert wall_list[index].position == position
        assert texture_fragment in wall_list[index].texture.name
def test_csv_right_down():
    """Map with CSV layer data, right-down render order, external tileset."""
    # Read in the tiled map
    my_map = arcade.load_tilemap("../tiled_maps/csv_right_down_external.json")
    assert my_map.tile_width == 128
    assert my_map.tile_height == 128
    assert my_map.width == 10
    assert my_map.height == 10

    # --- Platforms ---
    assert "Blocking Sprites" in my_map.sprite_lists
    wall_list = my_map.sprite_lists["Blocking Sprites"]
    # (position, texture-name fragment) expected for the first three sprites.
    expected_walls = [
        ((64, 1216), "dirtCenter"),
        ((1216, 1216), "grassCenter"),
        ((64, 64), "boxCrate"),
    ]
    for index, (position, texture_fragment) in enumerate(expected_walls):
        assert wall_list[index].position == position
        assert texture_fragment in wall_list[index].texture.name
def test_base_64_zlib():
    """Map with base64-encoded, zlib-compressed layer data."""
    # Read in the tiled map
    my_map = arcade.load_tilemap("../tiled_maps/base_64_zlib.json")
    assert my_map.tile_width == 128
    assert my_map.tile_height == 128
    assert my_map.width == 10
    assert my_map.height == 10

    # --- Platforms ---
    assert "Blocking Sprites" in my_map.sprite_lists
    wall_list = my_map.sprite_lists["Blocking Sprites"]
    # (position, texture-name fragment) expected for the first three sprites.
    expected_walls = [
        ((64, 1216), "dirtCenter"),
        ((1216, 1216), "grassCenter"),
        ((64, 64), "boxCrate"),
    ]
    for index, (position, texture_fragment) in enumerate(expected_walls):
        assert wall_list[index].position == position
        assert texture_fragment in wall_list[index].texture.name
def test_base_64_gzip():
    """Map with base64-encoded, gzip-compressed layer data."""
    # Read in the tiled map
    my_map = arcade.load_tilemap("../tiled_maps/base_64_gzip.json")
    assert my_map.tile_width == 128
    assert my_map.tile_height == 128
    assert my_map.width == 10
    assert my_map.height == 10

    # --- Platforms ---
    assert "Blocking Sprites" in my_map.sprite_lists
    wall_list = my_map.sprite_lists["Blocking Sprites"]
    # (position, texture-name fragment) expected for the first three sprites.
    expected_walls = [
        ((64, 1216), "dirtCenter"),
        ((1216, 1216), "grassCenter"),
        ((64, 64), "boxCrate"),
    ]
    for index, (position, texture_fragment) in enumerate(expected_walls):
        assert wall_list[index].position == position
        assert texture_fragment in wall_list[index].texture.name
| 29.479167
| 78
| 0.687633
| 428
| 2,830
| 4.306075
| 0.114486
| 0.075963
| 0.095496
| 0.065111
| 0.945741
| 0.945741
| 0.945741
| 0.945741
| 0.945741
| 0.945741
| 0
| 0.063811
| 0.191519
| 2,830
| 95
| 79
| 29.789474
| 0.741696
| 0.056184
| 0
| 0.827586
| 0
| 0
| 0.145379
| 0.053719
| 0
| 0
| 0
| 0
| 0.758621
| 1
| 0.068966
| false
| 0
| 0.017241
| 0
| 0.086207
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
906feb82835bdc71bdbce222f2f41bba6cf41123
| 1,904
|
py
|
Python
|
venv-skin/lib/python3.6/site-packages/hashes/ops/sha512_crypt.py
|
nobi1007/flask-apis
|
9ae5d676be5fe7d0669415089bd032f43abda2d8
|
[
"MIT"
] | null | null | null |
venv-skin/lib/python3.6/site-packages/hashes/ops/sha512_crypt.py
|
nobi1007/flask-apis
|
9ae5d676be5fe7d0669415089bd032f43abda2d8
|
[
"MIT"
] | 3
|
2020-06-03T08:09:39.000Z
|
2021-04-30T21:17:43.000Z
|
venv-skin/lib/python3.6/site-packages/hashes/ops/sha512_crypt.py
|
nobi1007/flask-apis
|
9ae5d676be5fe7d0669415089bd032f43abda2d8
|
[
"MIT"
] | null | null | null |
'''
This module generates sha512_crypt hashes
'''
from hashes.common import helpers
from passlib.hash import sha512_crypt
class Algorithm:
    """Generator for sha512_crypt password hashes via passlib.

    Fixes over the previous revision:
    - Python 2 `print` statements converted to `print()` calls (the file is
      shipped under a python3.6 site-packages tree, where the old form is a
      SyntaxError; the call form also behaves identically on Python 2 for a
      single argument).
    - The copy-pasted warning text "sha512_crypt and sha512_crypt" and the
      inconsistent "[*] Warning:" prefix between the two branches unified.
    - The four duplicated encrypt call sites collapsed into one code path;
      the unreachable trailing `return` removed.
    """

    def __init__(self):
        self.hash_type = "sha512_crypt"
        self.description = "This module generates sha512_crypt hashes"

    def generate(self, cli_object):
        """Return a sha512_crypt hash of `cli_object.plaintext`.

        `cli_object.salt` and `cli_object.rounds` are `False` when the user
        did not supply them. passlib raises ValueError for rounds < 1000; in
        that case a warning is printed and the hash is generated with the
        default rounds, matching the original behavior.
        """
        kwargs = {}
        if cli_object.salt is not False:
            kwargs["salt"] = cli_object.salt
        if cli_object.rounds is not False:
            try:
                return sha512_crypt.encrypt(cli_object.plaintext,
                                            rounds=int(cli_object.rounds),
                                            **kwargs)
            except ValueError:
                # passlib rejects the requested rounds; warn and fall through
                # to a hash generated with the library default.
                print(helpers.color("[*] Warning: sha512_crypt requires at least 1000 rounds.", warning=True))
                print(helpers.color("[*] Running with default of 60000 rounds.", warning=True))
        return sha512_crypt.encrypt(cli_object.plaintext, **kwargs)
| 44.27907
| 131
| 0.617122
| 200
| 1,904
| 5.705
| 0.255
| 0.118317
| 0.126205
| 0.163015
| 0.836985
| 0.836985
| 0.758983
| 0.667835
| 0.667835
| 0.651183
| 0
| 0.046048
| 0.315651
| 1,904
| 42
| 132
| 45.333333
| 0.829624
| 0
| 0
| 0.617647
| 1
| 0
| 0.143396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.029412
| 0.058824
| null | null | 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
908119afaf07d4c9a1bbbb13384b5d86d17c2f00
| 132
|
py
|
Python
|
lusidtools/jupyter_tools/__init__.py
|
JamesALeedham/lusid-python-tools
|
948b2a849f20bb2b3f052a6bab083646a14cebf7
|
[
"MIT"
] | 4
|
2020-07-24T10:28:14.000Z
|
2021-11-05T10:55:23.000Z
|
lusidtools/jupyter_tools/__init__.py
|
JamesALeedham/lusid-python-tools
|
948b2a849f20bb2b3f052a6bab083646a14cebf7
|
[
"MIT"
] | 185
|
2019-11-21T14:58:38.000Z
|
2022-03-04T18:44:22.000Z
|
lusidtools/jupyter_tools/__init__.py
|
JamesALeedham/lusid-python-tools
|
948b2a849f20bb2b3f052a6bab083646a14cebf7
|
[
"MIT"
] | 6
|
2020-01-28T19:31:04.000Z
|
2021-04-21T15:50:23.000Z
|
from lusidtools.jupyter_tools.stop_execution import StopExecution
from lusidtools.jupyter_tools.hide_code_button import toggle_code
| 44
| 65
| 0.909091
| 18
| 132
| 6.333333
| 0.666667
| 0.245614
| 0.368421
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 132
| 2
| 66
| 66
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
90c67d047de7b03f5be169244e9f8b740cf09cc1
| 79
|
py
|
Python
|
pyscf/fciqmcscf/__init__.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | 1
|
2018-05-02T19:55:30.000Z
|
2018-05-02T19:55:30.000Z
|
pyscf/fciqmcscf/__init__.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | null | null | null |
pyscf/fciqmcscf/__init__.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | 1
|
2018-12-06T03:10:50.000Z
|
2018-12-06T03:10:50.000Z
|
from pyscf.fciqmcscf import fciqmc
from pyscf.fciqmcscf.fciqmc import FCIQMCCI
| 26.333333
| 43
| 0.860759
| 11
| 79
| 6.181818
| 0.545455
| 0.264706
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101266
| 79
| 2
| 44
| 39.5
| 0.957746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2941fdb1642632ec4bc674bb41d2b20b43ec3e0b
| 140
|
py
|
Python
|
servertools/plugins/__init__.py
|
cloudtrekkie/servertools
|
4b4e72f5b4c4be3b35f6e414f81c5602ba97db08
|
[
"Apache-2.0"
] | null | null | null |
servertools/plugins/__init__.py
|
cloudtrekkie/servertools
|
4b4e72f5b4c4be3b35f6e414f81c5602ba97db08
|
[
"Apache-2.0"
] | null | null | null |
servertools/plugins/__init__.py
|
cloudtrekkie/servertools
|
4b4e72f5b4c4be3b35f6e414f81c5602ba97db08
|
[
"Apache-2.0"
] | null | null | null |
"""
plugins for servertools
"""
from servertools.plugins.Categories import *
from servertools.plugins.mittwaldserver import MittwaldServer
| 20
| 61
| 0.821429
| 14
| 140
| 8.214286
| 0.5
| 0.26087
| 0.382609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 140
| 6
| 62
| 23.333333
| 0.912698
| 0.164286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
294bf1ea0d8604492497427f152193b41890de5b
| 18,688
|
py
|
Python
|
plugins/intro.py
|
Borgli/Ohminator
|
7aa12e3b0cf77b4c7da73e34fe5787554db41cc0
|
[
"Apache-2.0"
] | 2
|
2018-01-13T15:27:04.000Z
|
2018-03-25T20:41:30.000Z
|
plugins/intro.py
|
Borgli/Ohminator
|
7aa12e3b0cf77b4c7da73e34fe5787554db41cc0
|
[
"Apache-2.0"
] | 10
|
2018-01-13T16:55:03.000Z
|
2019-11-12T09:09:27.000Z
|
plugins/intro.py
|
Borgli/Ohminator
|
7aa12e3b0cf77b4c7da73e34fe5787554db41cc0
|
[
"Apache-2.0"
] | null | null | null |
import os
import os.path
import random
import re
import shutil
import traceback
import urllib.request
import asyncio
from utils import register_command
import utils
class IntroManager:
    # Coordinates intro playback for one server: counts how many intros are
    # in flight and resumes the paused music player once the last one ends.
    def __init__(self, client, ohm_server):
        self.client = client
        self.ohm_server = ohm_server
        # Number of intros currently queued/playing; guarded by intro_counter_lock.
        self.intro_counter = 0
        # Signalled by after_intro() each time an intro player finishes.
        self.intro_finished = asyncio.Event()
        self.intro_counter_lock = asyncio.Lock()
        self.intro_lock = asyncio.Lock()
        # Long-lived background task that handles post-intro resumption.
        asyncio.get_event_loop().create_task(self.resume_playing_sound())

    async def resume_playing_sound(self):
        # Runs for the life of the client: wait for an intro to finish,
        # decrement the in-flight counter under its lock, and resume the
        # paused main player only when the last concurrent intro is done.
        # NOTE(review): is_closed is read as an attribute, not called —
        # presumably an old discord.py where it is a property; confirm.
        while not self.client.is_closed:
            await self.intro_finished.wait()
            await self.intro_counter_lock.acquire()
            try:
                self.intro_counter -= 1
                if self.intro_counter == 0:
                    if self.ohm_server.active_player is not None:
                        self.ohm_server.active_player.resume()
            finally:
                self.intro_counter_lock.release()
            self.intro_finished.clear()

    def after_intro(self):
        # Player "after" callback — invoked from the player's own thread,
        # hence call_soon_threadsafe rather than setting the event directly.
        if self.ohm_server.intro_player.error:
            print(self.ohm_server.intro_player.error)
            traceback.print_exc()
        self.client.loop.call_soon_threadsafe(self.intro_finished.set)
        #print("Intro finished playing.")
@register_command("introstop", "stopintro", "is")
async def introstop(message, bot_channel, client):
await client.delete_message(message)
server = utils.get_server(message.server)
if server.intro_player is None or not server.intro_player.is_playing():
await client.send_message(bot_channel, '{}: No active intro to stop!'.format(message.author.name))
else:
await client.send_message(bot_channel, '{} stopped the intro!'.format(message.author.name))
server.intro_player.stop()
@register_command("intro", "i")
async def intro(message, bot_channel, client):
await client.delete_message(message)
server = utils.get_server(message.server)
if message.author.voice_channel is None or message.author.voice.is_afk:
return
member = server.get_member(message.author.id)
if message.author.name is not None and member.has_intro():
# Handles playing intros when the bot is summoned
if server.playlist.summoned_channel:
if message.author.voice.voice_channel == server.playlist.summoned_channel:
voice_channel = server.playlist.summoned_channel
else:
await client.send_message(bot_channel,
'{}: The bot is locked to channel {}. '
'Please join that channel to use !intro.'.format(
message.author.name, server.playlist.summoned_channel.name))
return
else:
voice_channel = message.author.voice_channel
voice_client = await utils.connect_to_voice(client, message.author.server, voice_channel)
# voice_client = client.voice_client_in(message.author.server)
'''
try:
if voice_client is None:
voice_client = await client.join_voice_channel(voice_channel)
elif voice_client.channel is None:
await voice_client.disconnect()
voice_client = await client.join_voice_channel(voice_channel)
elif voice_client.channel != voice_channel:
await voice_client.move_to(voice_channel)
except Exception as e:
print(e)
await client.send_message(bot_channel,
'{}: Could not connect to voice channel!'.format(message.author.name))
return
'''
if server.active_tts:
server.active_tts.stop()
server.tts_queue.clear()
if server.active_player is not None and server.active_player.is_playing():
server.active_player.pause()
if server.intro_player is not None and server.intro_player.is_playing():
server.intro_player.stop()
given_index = 0
try:
intro_list = os.listdir('servers/{}/members/{}/intros'.format(server.server_loc, member.member_loc))
try:
parameters = message.content.split()
if len(parameters) > 1:
given_index = int(parameters[1])
if given_index < 1:
# Because negative indices are valid in python,
# but not in our use case, we throw an error here
raise IndexError
else:
intro_index = given_index - 1
else:
raise ValueError
except ValueError:
intro_index = intro_list.index(random.choice(intro_list))
given_index = 0
server.intro_player = voice_client.create_ffmpeg_player(
'servers/{}/members/{}/intros/{}'.format(server.server_loc, member.member_loc,
intro_list[intro_index]),
after=server.intro_manager.after_intro)
await server.intro_manager.intro_counter_lock.acquire()
server.intro_manager.intro_counter += 1
server.intro_player.volume = 0.25
server.intro_player.start()
except IndexError:
await client.send_message(bot_channel,
'{}: The given index of {} is out of bounds!'.format(
message.author.name, given_index))
except NameError:
pass
except Exception as e:
print(e)
await client.send_message(bot_channel,
'{}: Could not play intro!'.format(message.author.name))
finally:
server.intro_manager.intro_counter_lock.release()
else:
await client.send_message(bot_channel,
'{}: You dont have an intro!'.format(message.author.name))
@register_command("myintros", "intros", "mi")
async def myintros(message, bot_channel, client):
await client.delete_message(message)
server = utils.get_server(message.server)
member = server.get_member(message.author.id)
intro_list = os.listdir('servers/{}/members/{}/intros'.format(server.server_loc, member.member_loc))
intro_print = str()
index_cnt = 1
for i in intro_list:
intro_print += '\n**[{}]**:\t{}'.format(index_cnt, i)
index_cnt += 1
await client.send_message(bot_channel,
'{}: Intro list:{}'.format(message.author.mention, intro_print))
@register_command("deleteintro", "introdelete")
async def deleteintro(message, bot_channel, client):
await client.delete_message(message)
parameters = message.content.split()
try:
if len(parameters) < 2:
raise Exception
intro_index = int(parameters[1])
if intro_index < 1 or intro_index > 5:
await client.send_message(bot_channel,
'{}: Index is out of bounds!'.format(message.author.name))
return
except:
await client.send_message(bot_channel,
'{}: Invalid parameter. Must be the index of the intro to delete!'.format(
message.author.name))
return
try:
server = utils.get_server(message.server)
member = server.get_member(message.author.id)
intro_list = os.listdir('servers/{}/members/{}/intros'.format(server.server_loc, member.member_loc))
await client.send_message(bot_channel,
'{}: Deleting intro {} at index {}'.format(
message.author.name, intro_list[intro_index - 1], intro_index))
os.remove(
'servers/{}/members/{}/intros/{}'.format(server.server_loc, member.member_loc, intro_list[intro_index - 1]))
except:
await client.send_message(bot_channel,
'{}: Could not remove file. No file found at given index.'.format(
message.author.name))
return
@register_command("upload", "up", "u")
async def upload(message, bot_channel, client):
if len(message.attachments) > 0:
try:
# regex function checks if the file is a file ending with .wav or .mp3
find_name = re.findall(r'([a-zA-Z\d_ .]+?.(?:wav|mp3))$', message.attachments[0]["filename"])
file_name = find_name.pop()
except IndexError:
await client.send_message(bot_channel, '{}: Invalid file or file format. Must be .wav or .mp3.'.format(message.author.name))
return
server = utils.get_server(message.server)
member = server.get_member(message.author.id)
intro_list = os.listdir('servers/{}/members/{}/intros'.format(server.server_loc, member.member_loc))
if (len(intro_list) + 1) > 3:
await client.send_message(bot_channel,
'{}: You have reached the maximum number of intros. '
'Please delete an intro before uploading a new one'.format(
message.author.name))
return
req = urllib.request.Request(message.attachments[0]["url"], headers={'User-Agent': 'Mozilla/5.0'})
with urllib.request.urlopen(req) as response, open(message.attachments[0]["filename"], 'wb') as out_file:
shutil.copyfileobj(response, out_file)
path = os.path.realpath(message.attachments[0]["filename"])
os.rename(path, 'servers/{}/members/{}/intros/{}'.format(server.server_loc, member.member_loc, message.attachments[0]["filename"]))
#TODO: Fix restrictions and check for valid file
await client.delete_message(message)
await client.send_message(bot_channel, '{}: Upload successful.'.format(message.author.name))
else:
await client.send_message(bot_channel, '{}: Please use this command while uploading a file to discord.'.format(message.author.name))
@register_command("defaultintro", "di")
async def default_intro(message, bot_channel, client):
await client.delete_message(message)
server = utils.get_server(message.server)
if message.author.voice_channel is None or message.author.voice.is_afk:
return
member = server.get_member(message.author.id)
if message.author.name is not None and member.has_intro:
# Handles playing intros when the bot is summoned
if server.playlist.summoned_channel:
if message.author.voice.voice_channel == server.playlist.summoned_channel:
voice_channel = server.playlist.summoned_channel
else:
await client.send_message(bot_channel,
'{}: The bot is locked to channel {}. '
'Please join that channel to use !intro.'.format(
message.author.name, server.playlist.summoned_channel.name))
return
else:
voice_channel = message.author.voice_channel
voice_client = client.voice_client_in(message.author.server)
try:
if voice_client is None:
voice_client = await client.join_voice_channel(voice_channel)
elif voice_client.channel is None:
await voice_client.disconnect()
voice_client = await client.join_voice_channel(voice_channel)
elif voice_client.channel != voice_channel:
await voice_client.move_to(voice_channel)
except Exception as e:
print(e)
await client.send_message(bot_channel,
'{}: Could not connect to voice channel!'.format(message.author.name))
return
if server.active_tts:
server.active_tts.stop()
server.tts_queue.clear()
if server.active_player is not None and server.active_player.is_playing():
server.active_player.pause()
if server.intro_player is not None and server.intro_player.is_playing():
server.intro_player.stop()
given_index = 0
try:
intro_list = os.listdir('servers/{}/default_intros'.format(server.server_loc))
if len(intro_list) == 0:
await client.send_message(bot_channel,
'{}: No default intros have been added!'.format(message.author.name))
return
try:
parameters = message.content.split()
if len(parameters) > 1:
given_index = int(parameters[1])
if given_index < 1:
# Because negative indices are valid in python,
# but not in our use case, we throw an error here
raise IndexError
else:
intro_index = given_index - 1
else:
raise ValueError
except ValueError:
intro_index = intro_list.index(random.choice(intro_list))
given_index = 0
server.intro_player = voice_client.create_ffmpeg_player(
'servers/{}/default_intros/{}'.format(server.server_loc, intro_list[intro_index]),
after=server.intro_manager.after_intro)
await server.intro_manager.intro_counter_lock.acquire()
server.intro_manager.intro_counter += 1
server.intro_player.volume = 0.25
server.intro_player.start()
except IndexError:
await client.send_message(bot_channel,
'{}: The given index of {} is out of bounds!'.format(
message.author.name, given_index))
except NameError:
pass
except Exception as e:
print(e)
await client.send_message(bot_channel,
'{}: Could not play intro!'.format(message.author.name))
finally:
server.intro_manager.intro_counter_lock.release()
else:
await client.send_message(bot_channel,
'{}: You dont have an intro!'.format(message.author.name))
@register_command("defaultintros", "dis", "ldi", "listdefaultintros")
async def list_default_intros(message, bot_channel, client):
await client.delete_message(message)
server = utils.get_server(message.server)
intro_list = os.listdir('servers/{}/default_intros'.format(server.server_loc))
intro_print = str()
index_cnt = 1
for i in intro_list:
intro_print += '\n**[{}]**:\t{}'.format(index_cnt, i)
index_cnt += 1
await client.send_message(bot_channel,
'{}: Default intro list:{}'.format(message.author.mention, intro_print))
@register_command("deletedefaultintro", "ddi")
async def delete_default_intro(message, bot_channel, client):
await client.delete_message(message)
parameters = message.content.split()
try:
if len(parameters) < 2:
raise Exception
intro_index = int(parameters[1])
if intro_index < 1 or intro_index > 5:
await client.send_message(bot_channel,
'{}: Index is out of bounds!'.format(message.author.name))
return
except:
await client.send_message(bot_channel,
'{}: Invalid parameter. Must be the index of the intro to delete!'.format(
message.author.name))
return
try:
server = utils.get_server(message.server)
intro_list = os.listdir('servers/{}/default_intros'.format(server.server_loc))
await client.send_message(bot_channel,
'{}: Deleting default intro {} at index {}'.format(
message.author.name, intro_list[intro_index - 1], intro_index))
os.remove(
'servers/{}/default_intros/{}'.format(server.server_loc, intro_list[intro_index - 1]))
except:
await client.send_message(bot_channel,
'{}: Could not remove file. No file found at given index.'.format(
message.author.name))
return
@register_command("uploaddefaultintro", "udi")
async def upload_default_intro(message, bot_channel, client):
if len(message.attachments) > 0:
try:
# regex function checks if the file is a file ending with .wav or .mp3
find_name = re.findall(r'([a-zA-Z\d_ .]+?.(?:wav|mp3))$', message.attachments[0]["filename"])
file_name = find_name.pop()
except IndexError:
await client.send_message(bot_channel, '{}: Invalid file or file format. Must be .wav or .mp3.'.format(
message.author.name))
return
server = utils.get_server(message.server)
intro_list = os.listdir('servers/{}/default_intros'.format(server.server_loc))
if (len(intro_list) + 1) > 3:
await client.send_message(bot_channel,
'{}: You have reached the maximum number of default intros. '
'Please delete an intro before uploading a new one.'.format(
message.author.name))
return
req = urllib.request.Request(message.attachments[0]["url"], headers={'User-Agent': 'Mozilla/5.0'})
with urllib.request.urlopen(req) as response, open(message.attachments[0]["filename"], 'wb') as out_file:
shutil.copyfileobj(response, out_file)
path = os.path.realpath(message.attachments[0]["filename"])
os.rename(path, 'servers/{}/default_intros/{}'.format(server.server_loc, message.attachments[0]["filename"]))
# TODO: Fix restrictions and check for valid file
await client.delete_message(message)
await client.send_message(bot_channel, '{}: Upload successful.'.format(message.author.name))
else:
await client.send_message(bot_channel, '{}: Please use this command while uploading a file to discord.'.format(
message.author.name))
| 46.14321
| 140
| 0.591182
| 2,104
| 18,688
| 5.070817
| 0.110741
| 0.059706
| 0.063736
| 0.063924
| 0.888837
| 0.88162
| 0.865217
| 0.855938
| 0.848439
| 0.848439
| 0
| 0.005125
| 0.310948
| 18,688
| 404
| 141
| 46.257426
| 0.823406
| 0.032641
| 0
| 0.742515
| 0
| 0
| 0.121976
| 0.02235
| 0
| 0
| 0
| 0.002475
| 0
| 1
| 0.005988
| false
| 0.005988
| 0.02994
| 0
| 0.086826
| 0.032934
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
467b8ed2a4241142dfa135aa895ac0fbfcb5a6cd
| 50,146
|
py
|
Python
|
tests/build/test_build_request.py
|
jpopelka/osbs-client
|
a8a926ef34b44a52da42fa07b3847efaaf06b12a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/build/test_build_request.py
|
jpopelka/osbs-client
|
a8a926ef34b44a52da42fa07b3847efaaf06b12a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/build/test_build_request.py
|
jpopelka/osbs-client
|
a8a926ef34b44a52da42fa07b3847efaaf06b12a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import copy
import json
import os
from pkg_resources import parse_version
import shutil
from osbs.build.build_request import BuildManager, BuildRequest, ProductionBuild
from osbs.constants import (PROD_BUILD_TYPE, PROD_WITHOUT_KOJI_BUILD_TYPE,
PROD_WITH_SECRET_BUILD_TYPE)
from osbs.exceptions import OsbsValidationException
from flexmock import flexmock
import pytest
from tests.constants import (INPUTS_PATH, TEST_BUILD_CONFIG, TEST_BUILD_JSON, TEST_COMPONENT,
TEST_GIT_BRANCH, TEST_GIT_REF, TEST_GIT_URI)
class NoSuchPluginException(Exception):
    """Raised when a named plugin is absent from a plugin configuration."""
def get_plugin(plugins, plugin_type, plugin_name):
    """Return the plugin dict named *plugin_name* from *plugins[plugin_type]*.

    Raises NoSuchPluginException when no plugin in that section has a
    matching "name" entry.
    """
    candidates = plugins[plugin_type]
    found = next((entry for entry in candidates if entry["name"] == plugin_name), None)
    if found is None:
        raise NoSuchPluginException()
    return found
def plugin_value_get(plugins, plugin_type, plugin_name, *args):
    """Look up a plugin via get_plugin, then walk *args as nested keys.

    E.g. plugin_value_get(p, "exit_plugins", "store_metadata_in_osv3",
    "args", "url") returns plugin["args"]["url"].
    """
    node = get_plugin(plugins, plugin_type, plugin_name)
    for key in args:
        node = node[key]
    return node
class TestBuildRequest(object):
def test_build_request_is_auto_instantiated(self):
build_json = copy.deepcopy(TEST_BUILD_JSON)
br = BuildRequest('something')
flexmock(br).should_receive('template').and_return(build_json)
assert br.is_auto_instantiated() is True
def test_build_request_isnt_auto_instantiated(self):
build_json = copy.deepcopy(TEST_BUILD_JSON)
build_json['spec']['triggers'] = []
br = BuildRequest('something')
flexmock(br).should_receive('template').and_return(build_json)
assert br.is_auto_instantiated() is False
def test_render_simple_request_incorrect_postbuild(self, tmpdir):
# Make temporary copies of the JSON files
for basename in ['simple.json', 'simple_inner.json']:
shutil.copy(os.path.join(INPUTS_PATH, basename),
os.path.join(str(tmpdir), basename))
# Create an inner JSON description which incorrectly runs the exit
# plugins as postbuild plugins.
with open(os.path.join(str(tmpdir), 'simple_inner.json'), 'r+') as inner:
inner_json = json.load(inner)
# Re-write all the exit plugins as postbuild plugins
exit_plugins = inner_json['exit_plugins']
inner_json['postbuild_plugins'].extend(exit_plugins)
del inner_json['exit_plugins']
inner.seek(0)
json.dump(inner_json, inner)
inner.truncate()
bm = BuildManager(str(tmpdir))
build_request = bm.get_build_request_by_type("simple")
kwargs = {
'git_uri': TEST_GIT_URI,
'git_ref': TEST_GIT_REF,
'user': "john-foo",
'component': "component",
'registry_uri': "registry.example.com",
'openshift_uri': "http://openshift/",
'builder_openshift_url': "http://openshift/",
}
build_request.set_params(**kwargs)
build_json = build_request.render()
env_vars = build_json['spec']['strategy']['customStrategy']['env']
plugins_json = None
for d in env_vars:
if d['name'] == 'DOCK_PLUGINS':
plugins_json = d['value']
break
assert plugins_json is not None
plugins = json.loads(plugins_json)
# Check the store_metadata_in_osv3's uri parameter was set
# correctly, even though it was listed as a postbuild plugin.
assert plugin_value_get(plugins, "postbuild_plugins", "store_metadata_in_osv3", "args", "url") == \
"http://openshift/"
@pytest.mark.parametrize('tag', [
None,
"some_tag",
])
@pytest.mark.parametrize('registry_uris', [
[],
["registry.example.com:5000"],
["registry.example.com:5000", "localhost:6000"],
])
def test_render_simple_request(self, tag, registry_uris):
bm = BuildManager(INPUTS_PATH)
build_request = bm.get_build_request_by_type("simple")
name_label = "fedora/resultingimage"
kwargs = {
'git_uri': TEST_GIT_URI,
'git_ref': TEST_GIT_REF,
'user': "john-foo",
'component': TEST_COMPONENT,
'registry_uris': registry_uris,
'openshift_uri': "http://openshift/",
'builder_openshift_url': "http://openshift/",
'tag': tag,
}
build_request.set_params(**kwargs)
build_json = build_request.render()
assert build_json["metadata"]["name"] is not None
assert "triggers" not in build_json["spec"]
assert build_json["spec"]["source"]["git"]["uri"] == TEST_GIT_URI
assert build_json["spec"]["source"]["git"]["ref"] == TEST_GIT_REF
expected_output = "john-foo/component:%s" % (tag if tag else "20")
if registry_uris:
expected_output = registry_uris[0] + "/" + expected_output
assert build_json["spec"]["output"]["to"]["name"].startswith(expected_output)
env_vars = build_json['spec']['strategy']['customStrategy']['env']
plugins_json = None
for d in env_vars:
if d['name'] == 'DOCK_PLUGINS':
plugins_json = d['value']
break
assert plugins_json is not None
plugins = json.loads(plugins_json)
pull_base_image = get_plugin(plugins, "prebuild_plugins",
"pull_base_image")
assert pull_base_image is not None
assert ('args' not in pull_base_image or
'parent_registry' not in pull_base_image['args'])
assert plugin_value_get(plugins, "exit_plugins", "store_metadata_in_osv3", "args", "url") == \
"http://openshift/"
for r in registry_uris:
assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
"registries", r) == {"insecure": True}
@pytest.mark.parametrize('architecture', [
None,
'x86_64',
])
def test_render_prod_request_with_repo(self, architecture):
bm = BuildManager(INPUTS_PATH)
build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
name_label = "fedora/resultingimage"
assert isinstance(build_request, ProductionBuild)
kwargs = {
'git_uri': TEST_GIT_URI,
'git_ref': TEST_GIT_REF,
'git_branch': TEST_GIT_BRANCH,
'user': "john-foo",
'component': TEST_COMPONENT,
'base_image': 'fedora:latest',
'name_label': name_label,
'registry_uri': "registry.example.com",
'source_registry_uri': "registry.example.com",
'openshift_uri': "http://openshift/",
'builder_openshift_url': "http://openshift/",
'koji_target': "koji-target",
'kojiroot': "http://root/",
'kojihub': "http://hub/",
'sources_command': "make",
'architecture': architecture,
'vendor': "Foo Vendor",
'build_host': "our.build.host.example.com",
'authoritative_registry': "registry.example.com",
'distribution_scope': "authoritative-source-only",
'yum_repourls': ["http://example.com/my.repo"],
'registry_api_versions': ['v1'],
}
build_request.set_params(**kwargs)
build_json = build_request.render()
assert build_json["metadata"]["name"] == TEST_BUILD_CONFIG
assert "triggers" not in build_json["spec"]
assert build_json["spec"]["source"]["git"]["uri"] == TEST_GIT_URI
assert build_json["spec"]["source"]["git"]["ref"] == TEST_GIT_REF
assert build_json["spec"]["output"]["to"]["name"].startswith(
"registry.example.com/john-foo/component:"
)
env_vars = build_json['spec']['strategy']['customStrategy']['env']
plugins_json = None
for d in env_vars:
if d['name'] == 'DOCK_PLUGINS':
plugins_json = d['value']
break
assert plugins_json is not None
plugins = json.loads(plugins_json)
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins",
"stop_autorebuild_if_disabled")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins", "bump_release")
assert plugin_value_get(plugins, "prebuild_plugins", "distgit_fetch_artefacts",
"args", "command") == "make"
assert plugin_value_get(plugins, "prebuild_plugins", "pull_base_image",
"args", "parent_registry") == "registry.example.com"
assert plugin_value_get(plugins, "exit_plugins", "store_metadata_in_osv3",
"args", "url") == "http://openshift/"
assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
"registries", "registry.example.com") == {"insecure": True}
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins", "koji")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "cp_built_image_to_nfs")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "pulp_push")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "pulp_sync")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "import_image")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "exit_plugins", "koji_promote")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "exit_plugins", "sendmail")
assert 'sourceSecret' not in build_json["spec"]["source"]
assert plugin_value_get(plugins, "prebuild_plugins", "add_yum_repo_by_url",
"args", "repourls") == ["http://example.com/my.repo"]
labels = plugin_value_get(plugins, "prebuild_plugins", "add_labels_in_dockerfile",
"args", "labels")
assert labels is not None
assert labels['Authoritative_Registry'] is not None
assert labels['Build_Host'] is not None
assert labels['Vendor'] is not None
assert labels['distribution-scope'] is not None
if architecture:
assert labels['Architecture'] is not None
else:
assert 'Architecture' not in labels
def test_render_prod_request(self):
bm = BuildManager(INPUTS_PATH)
build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
name_label = "fedora/resultingimage"
kwargs = {
'git_uri': TEST_GIT_URI,
'git_ref': TEST_GIT_REF,
'git_branch': TEST_GIT_BRANCH,
'user': "john-foo",
'component': TEST_COMPONENT,
'base_image': 'fedora:latest',
'name_label': name_label,
'registry_uri': "registry.example.com",
'source_registry_uri': "registry.example.com",
'openshift_uri': "http://openshift/",
'builder_openshift_url': "http://openshift/",
'koji_target': "koji-target",
'kojiroot': "http://root/",
'kojihub': "http://hub/",
'sources_command': "make",
'architecture': "x86_64",
'vendor': "Foo Vendor",
'build_host': "our.build.host.example.com",
'authoritative_registry': "registry.example.com",
'distribution_scope': "authoritative-source-only",
'registry_api_versions': ['v1'],
'pdc_url': 'https://pdc.example.com',
'smtp_uri': 'smtp.example.com',
}
build_request.set_params(**kwargs)
build_json = build_request.render()
assert build_json["metadata"]["name"] == TEST_BUILD_CONFIG
assert "triggers" not in build_json["spec"]
assert build_json["spec"]["source"]["git"]["uri"] == TEST_GIT_URI
assert build_json["spec"]["source"]["git"]["ref"] == TEST_GIT_REF
assert build_json["spec"]["output"]["to"]["name"].startswith(
"registry.example.com/john-foo/component:"
)
env_vars = build_json['spec']['strategy']['customStrategy']['env']
plugins_json = None
for d in env_vars:
if d['name'] == 'DOCK_PLUGINS':
plugins_json = d['value']
break
assert plugins_json is not None
plugins = json.loads(plugins_json)
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins",
"stop_autorebuild_if_disabled")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "prebuild_plugins", "bump_release")
assert plugin_value_get(plugins, "prebuild_plugins", "distgit_fetch_artefacts",
"args", "command") == "make"
assert plugin_value_get(plugins, "prebuild_plugins", "pull_base_image", "args",
"parent_registry") == "registry.example.com"
assert plugin_value_get(plugins, "exit_plugins", "store_metadata_in_osv3",
"args", "url") == "http://openshift/"
assert plugin_value_get(plugins, "prebuild_plugins", "koji",
"args", "root") == "http://root/"
assert plugin_value_get(plugins, "prebuild_plugins", "koji",
"args", "target") == "koji-target"
assert plugin_value_get(plugins, "prebuild_plugins", "koji",
"args", "hub") == "http://hub/"
assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
"registries", "registry.example.com") == {"insecure": True}
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "cp_built_image_to_nfs")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "pulp_push")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "pulp_sync")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "postbuild_plugins", "import_image")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "exit_plugins", "koji_promote")
with pytest.raises(NoSuchPluginException):
get_plugin(plugins, "exit_plugins", "sendmail")
assert 'sourceSecret' not in build_json["spec"]["source"]
labels = plugin_value_get(plugins, "prebuild_plugins", "add_labels_in_dockerfile",
"args", "labels")
assert labels is not None
assert labels['Architecture'] is not None
assert labels['Authoritative_Registry'] is not None
assert labels['Build_Host'] is not None
assert labels['Vendor'] is not None
assert labels['distribution-scope'] is not None
def test_render_prod_without_koji_request(self):
    """
    Render a production build request of the without-Koji type and
    verify the resulting build JSON: Koji plugins, autorebuild
    plugins and pulp/NFS plugins must be absent, while the common
    prebuild/postbuild/exit plugins are configured from the given
    parameters.
    """
    bm = BuildManager(INPUTS_PATH)
    build_request = bm.get_build_request_by_type(PROD_WITHOUT_KOJI_BUILD_TYPE)
    name_label = "fedora/resultingimage"
    assert isinstance(build_request, ProductionBuild)
    # Note: no koji_target/kojiroot/kojihub here — this build type
    # does not use Koji at all.
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': name_label,
        'registry_uri': "registry.example.com",
        'source_registry_uri': "registry.example.com",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
    }
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    assert build_json["metadata"]["name"] == TEST_BUILD_CONFIG
    # No autorebuild parameters were given, so no triggers.
    assert "triggers" not in build_json["spec"]
    assert build_json["spec"]["source"]["git"]["uri"] == TEST_GIT_URI
    assert build_json["spec"]["source"]["git"]["ref"] == TEST_GIT_REF
    # Without Koji there is no release value, hence the "none-" tag prefix.
    assert build_json["spec"]["output"]["to"]["name"].startswith(
        "registry.example.com/john-foo/component:none-"
    )
    # The inner (atomic-reactor) plugin configuration is passed as
    # JSON through the DOCK_PLUGINS environment variable.
    env_vars = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in env_vars:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    # Autorebuild plugins must not be configured for this build type.
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins",
                   "stop_autorebuild_if_disabled")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "bump_release")
    assert plugin_value_get(plugins, "prebuild_plugins", "distgit_fetch_artefacts",
                            "args", "command") == "make"
    assert plugin_value_get(plugins, "prebuild_plugins", "pull_base_image", "args",
                            "parent_registry") == "registry.example.com"
    assert plugin_value_get(plugins, "exit_plugins", "store_metadata_in_osv3",
                            "args", "url") == "http://openshift/"
    assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
                            "registries", "registry.example.com") == {"insecure": True}
    # Koji, pulp, NFS and notification plugins must all be disabled.
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "koji")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "cp_built_image_to_nfs")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "pulp_push")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "pulp_sync")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "import_image")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "koji_promote")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "sendmail")
    assert 'sourceSecret' not in build_json["spec"]["source"]
    # The mandatory Dockerfile labels must all be filled in.
    labels = plugin_value_get(plugins, "prebuild_plugins", "add_labels_in_dockerfile",
                              "args", "labels")
    assert labels is not None
    assert labels['Architecture'] is not None
    assert labels['Authoritative_Registry'] is not None
    assert labels['Build_Host'] is not None
    assert labels['Vendor'] is not None
    assert labels['distribution-scope'] is not None
def test_render_prod_with_secret_request(self):
    """
    Render a production build request that uses a source secret and
    pulp/NFS output: the secret must appear in the build JSON, the
    Koji/pulp_push/NFS plugins must be enabled, and direct registry
    push must be configured with no registries.
    """
    bm = BuildManager(INPUTS_PATH)
    build_request = bm.get_build_request_by_type(PROD_WITH_SECRET_BUILD_TYPE)
    assert isinstance(build_request, ProductionBuild)
    # registry_uri is deliberately empty: output goes to pulp/NFS
    # instead of a direct docker registry.
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': 'fedora/resultingimage',
        'registry_uri': "",
        'pulp_registry': "registry.example.com",
        'nfs_server_path': "server:path",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'koji_target': "koji-target",
        'kojiroot': "http://root/",
        'kojihub': "http://hub/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
        'source_secret': 'mysecret',
    }
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    # The named secret is mounted via the (old-style) sourceSecret scheme.
    assert build_json["spec"]["source"]["sourceSecret"]["name"] == "mysecret"
    # Extract the inner plugin configuration from DOCK_PLUGINS.
    strategy = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in strategy:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    # Autorebuild plugins are not configured.
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins",
                   "stop_autorebuild_if_disabled")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "bump_release")
    # Koji, pulp_push (v1) and NFS copy are enabled; pulp_sync (v2),
    # import_image and the exit-time notification plugins are not.
    assert get_plugin(plugins, "prebuild_plugins", "koji")
    assert get_plugin(plugins, "postbuild_plugins", "pulp_push")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "pulp_sync")
    assert get_plugin(plugins, "postbuild_plugins", "cp_built_image_to_nfs")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "import_image")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "koji_promote")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "sendmail")
    # Empty registry_uri means tag_and_push gets no registries.
    assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
                            "registries") == {}
def test_render_prod_request_requires_newer(self):
    """
    Using the sendmail plugin without requiring at least OpenShift
    1.0.6 must fail validation, because configuring that plugin
    depends on the new-style secrets support.
    """
    manager = BuildManager(INPUTS_PATH)
    request = manager.get_build_request_by_type(PROD_WITH_SECRET_BUILD_TYPE)
    image_name_label = "fedora/resultingimage"
    params = dict(
        git_uri=TEST_GIT_URI,
        git_ref=TEST_GIT_REF,
        git_branch=TEST_GIT_BRANCH,
        user="john-foo",
        component=TEST_COMPONENT,
        base_image='fedora:latest',
        name_label=image_name_label,
        registry_uris=["registry1.example.com/v1",  # first is primary
                       "registry2.example.com/v2"],
        nfs_server_path="server:path",
        source_registry_uri="registry.example.com",
        openshift_uri="http://openshift/",
        builder_openshift_url="http://openshift/",
        koji_target="koji-target",
        kojiroot="http://root/",
        kojihub="http://hub/",
        sources_command="make",
        architecture="x86_64",
        vendor="Foo Vendor",
        build_host="our.build.host.example.com",
        authoritative_registry="registry.example.com",
        distribution_scope="authoritative-source-only",
        pdc_secret='foo',
        pdc_url='https://pdc.example.com',
        smtp_uri='smtp.example.com',
    )
    request.set_params(**params)
    # No set_openshift_required_version() call was made, so the
    # default (older) version applies and rendering must be rejected.
    with pytest.raises(OsbsValidationException):
        request.render()
@pytest.mark.parametrize('registry_api_versions', [
    ['v1', 'v2'],
    ['v2'],
])
@pytest.mark.parametrize('openshift_version', ['1.0.0', '1.0.6'])
def test_render_prod_request_v1_v2(self, registry_api_versions, openshift_version):
    """
    Render a pulp-backed production build across combinations of
    registry API versions (v1/v2) and required OpenShift versions,
    and check that the pulp_push/pulp_sync plugins and the secret
    mounting scheme match each combination.
    """
    bm = BuildManager(INPUTS_PATH)
    build_request = bm.get_build_request_by_type(PROD_WITH_SECRET_BUILD_TYPE)
    build_request.set_openshift_required_version(parse_version(openshift_version))
    name_label = "fedora/resultingimage"
    pulp_env = 'v1pulp'
    pulp_secret = pulp_env + 'secret'
    kwargs = {
        'pulp_registry': pulp_env,
        'pulp_secret': pulp_secret,
    }
    kwargs.update({
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': name_label,
        'registry_uris': [
            # first is primary
            "http://registry1.example.com:5000/v1",
            "http://registry2.example.com:5000/v2"
        ],
        'nfs_server_path': "server:path",
        'source_registry_uri': "registry.example.com",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'koji_target': "koji-target",
        'kojiroot': "http://root/",
        'kojihub': "http://hub/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': registry_api_versions,
    })
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    assert build_json["metadata"]["name"] == TEST_BUILD_CONFIG
    assert "triggers" not in build_json["spec"]
    assert build_json["spec"]["source"]["git"]["uri"] == TEST_GIT_URI
    assert build_json["spec"]["source"]["git"]["ref"] == TEST_GIT_REF
    # Pulp used, so no direct registry output
    assert build_json["spec"]["output"]["to"]["name"].startswith(
        "john-foo/component:"
    )
    # Extract the inner plugin configuration from DOCK_PLUGINS.
    env_vars = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in env_vars:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    # tag_and_push configuration. Must not have the scheme part.
    # The v2 registry is always present; the v1 registry is pushed to
    # directly only when 'v1' is in the requested API versions.
    expected_registries = {
        'registry2.example.com:5000': {'insecure': True},
    }
    if 'v1' in registry_api_versions:
        expected_registries['registry1.example.com:5000'] = {
            'insecure': True,
        }
    assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push",
                            "args", "registries") == expected_registries
    if openshift_version == '1.0.0':
        # Older OpenShift: pulp secret delivered via sourceSecret,
        # no 'secrets' array on the custom strategy.
        assert 'secrets' not in build_json['spec']['strategy']['customStrategy']
        assert build_json['spec']['source']['sourceSecret']['name'] == pulp_secret
    else:
        # 1.0.6+: secrets array scheme; each enabled pulp plugin's
        # pulp_secret_path must match exactly one mounted secret.
        assert 'sourceSecret' not in build_json['spec']['source']
        secrets = build_json['spec']['strategy']['customStrategy']['secrets']
        for version, plugin in [('v1', 'pulp_push'), ('v2', 'pulp_sync')]:
            if version not in registry_api_versions:
                continue
            path = plugin_value_get(plugins, "postbuild_plugins", plugin,
                                    "args", "pulp_secret_path")
            pulp_secrets = [secret for secret in secrets if secret['mountPath'] == path]
            assert len(pulp_secrets) == 1
            assert pulp_secrets[0]['secretSource']['name'] == pulp_secret
    if 'v1' in registry_api_versions:
        # v1 path: compress, NFS copy and pulp_push are all enabled.
        assert get_plugin(plugins, "postbuild_plugins",
                          "compress")
        assert get_plugin(plugins, "postbuild_plugins",
                          "cp_built_image_to_nfs")
        assert get_plugin(plugins, "postbuild_plugins",
                          "pulp_push")
        assert plugin_value_get(plugins, "postbuild_plugins", "pulp_push",
                                "args", "pulp_registry_name") == pulp_env
    else:
        with pytest.raises(NoSuchPluginException):
            get_plugin(plugins, "postbuild_plugins",
                       "compress")
        with pytest.raises(NoSuchPluginException):
            get_plugin(plugins, "postbuild_plugins",
                       "cp_built_image_to_nfs")
        with pytest.raises(NoSuchPluginException):
            get_plugin(plugins, "postbuild_plugins",
                       "pulp_push")
    if 'v2' in registry_api_versions:
        assert get_plugin(plugins, "postbuild_plugins", "pulp_sync")
        env = plugin_value_get(plugins, "postbuild_plugins", "pulp_sync",
                               "args", "pulp_registry_name")
        assert env == pulp_env
        docker_registry = plugin_value_get(plugins, "postbuild_plugins",
                                           "pulp_sync", "args",
                                           "docker_registry")
        # pulp_sync config must have the scheme part to satisfy pulp.
        assert docker_registry == 'http://registry2.example.com:5000'
    else:
        with pytest.raises(NoSuchPluginException):
            get_plugin(plugins, "postbuild_plugins", "pulp_sync")
def test_render_with_yum_repourls(self):
    """
    Validate the yum_repourls parameter: a non-list value must be
    rejected, and a valid list must be forwarded to the
    add_yum_repo_by_url prebuild plugin.
    """
    bm = BuildManager(INPUTS_PATH)
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': 'fedora/resultingimage',
        'registry_uri': "registry.example.com",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'koji_target': "koji-target",
        'kojiroot': "http://root/",
        'kojihub': "http://hub/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
    }
    build_request = bm.get_build_request_by_type("prod")
    # Test validation for yum_repourls parameter
    kwargs['yum_repourls'] = 'should be a list'
    with pytest.raises(OsbsValidationException):
        build_request.set_params(**kwargs)
    # Use a valid yum_repourls parameter and check the result
    kwargs['yum_repourls'] = ['http://example.com/repo1.repo', 'http://example.com/repo2.repo']
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    # Extract the inner plugin configuration from DOCK_PLUGINS.
    strategy = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in strategy:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    # Both repo URLs must reach the add_yum_repo_by_url plugin.
    repourls = None
    for d in plugins['prebuild_plugins']:
        if d['name'] == 'add_yum_repo_by_url':
            repourls = d['args']['repourls']
    assert repourls is not None
    assert len(repourls) == 2
    assert 'http://example.com/repo1.repo' in repourls
    assert 'http://example.com/repo2.repo' in repourls
    # None of the autorebuild, Koji, pulp/NFS or notification plugins
    # are configured for this plain "prod" build.
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins",
                   "stop_autorebuild_if_disabled")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "bump_release")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "koji")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "cp_built_image_to_nfs")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "pulp_push")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "pulp_sync")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "import_image")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "koji_promote")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "sendmail")
def test_render_prod_with_pulp_no_auth(self):
    """
    Specifying a pulp registry without any pulp auth configuration
    must make rendering fail with a validation error.
    """
    manager = BuildManager(INPUTS_PATH)
    request = manager.get_build_request_by_type(PROD_BUILD_TYPE)
    params = dict(
        git_uri=TEST_GIT_URI,
        git_ref=TEST_GIT_REF,
        git_branch=TEST_GIT_BRANCH,
        user="john-foo",
        component=TEST_COMPONENT,
        base_image='fedora:latest',
        name_label='fedora/resultingimage',
        registry_uri="registry.example.com",
        openshift_uri="http://openshift/",
        builder_openshift_url="http://openshift/",
        sources_command="make",
        architecture="x86_64",
        vendor="Foo Vendor",
        build_host="our.build.host.example.com",
        authoritative_registry="registry.example.com",
        distribution_scope="authoritative-source-only",
        pulp_registry="foo",  # pulp requested, but no secret/auth given
    )
    request.set_params(**params)
    with pytest.raises(OsbsValidationException):
        request.render()
@staticmethod
def create_image_change_trigger_json(outdir):
    """
    Copy the production build JSON templates into *outdir* and add an
    image change trigger to the outer template.

    :param outdir: str, path to store modified templates
    """
    # Work on temporary copies so the originals stay untouched.
    for template in ('prod.json', 'prod_inner.json'):
        shutil.copy(os.path.join(INPUTS_PATH, template),
                    os.path.join(outdir, template))

    trigger = {
        "type": "ImageChange",
        "imageChange": {
            "from": {
                "kind": "ImageStreamTag",
                "name": "{{BASE_IMAGE_STREAM}}"
            }
        }
    }

    # Rewrite the copied outer template in place with the trigger added.
    outer_path = os.path.join(outdir, 'prod.json')
    with open(outer_path, 'r+') as outer_file:
        description = json.load(outer_file)
        description['spec']['triggers'] = [trigger]
        outer_file.seek(0)
        json.dump(description, outer_file)
        outer_file.truncate()
@pytest.mark.parametrize(('registry_uri', 'insecure_registry'), [
    ("https://registry.example.com", False),
    ("http://registry.example.com", True),
])
@pytest.mark.parametrize('branchref', [
    # Wrong way round
    {
        'git_ref': TEST_GIT_BRANCH,
        'git_branch': TEST_GIT_REF,
        'should_raise': True,
    },
    # Right way round
    {
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'should_raise': False,
    },
])
def test_render_prod_request_with_trigger(self, tmpdir, branchref,
                                          registry_uri, insecure_registry):
    """
    Render a production build request whose template contains an
    image change trigger, and verify the autorebuild plugin
    configuration (check_and_set_rebuild, bump_release, import_image,
    koji_promote, sendmail). Swapped git branch/ref must be rejected.
    """
    self.create_image_change_trigger_json(str(tmpdir))
    bm = BuildManager(str(tmpdir))
    build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
    # We're using both pulp and sendmail, both of which require a
    # Kubernetes secret. This isn't supported until OpenShift
    # Origin 1.0.6.
    build_request.set_openshift_required_version(parse_version('1.0.6'))
    name_label = "fedora/resultingimage"
    push_url = "ssh://{username}git.example.com/git/{component}.git"
    pdc_secret_name = 'foo'
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': branchref['git_ref'],
        'git_branch': branchref['git_branch'],
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': name_label,
        'registry_uri': registry_uri,
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'koji_target': "koji-target",
        'kojiroot': "http://root/",
        'kojihub': "http://hub/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
        'git_push_url': push_url.format(username='', component=TEST_COMPONENT),
        'git_push_username': 'example',
        'pdc_secret': pdc_secret_name,
        'pdc_url': 'https://pdc.example.com',
        'smtp_uri': 'smtp.example.com',
    }
    build_request.set_params(**kwargs)
    if branchref['should_raise']:
        # Swapped branch/ref must be rejected at render time.
        with pytest.raises(OsbsValidationException):
            build_request.render()
        return

    # No redundant 'else' needed after the early return above.
    build_json = build_request.render()
    assert "triggers" in build_json["spec"]
    assert build_json["spec"]["triggers"][0]["imageChange"]["from"]["name"] == 'fedora:latest'
    # Extract the inner plugin configuration from DOCK_PLUGINS.
    strategy = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in strategy:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    # Fail with a clear message if DOCK_PLUGINS is missing, instead of
    # letting json.loads() raise TypeError on None (all sibling tests
    # make this check too).
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    # Autorebuild plugins must be enabled and point at OpenShift.
    assert get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
    assert get_plugin(plugins, "prebuild_plugins",
                      "stop_autorebuild_if_disabled")
    assert plugin_value_get(plugins, "prebuild_plugins",
                            "check_and_set_rebuild", "args",
                            "url") == kwargs["openshift_uri"]
    assert get_plugin(plugins, "prebuild_plugins", "bump_release")
    assert plugin_value_get(plugins, "prebuild_plugins", "bump_release", "args",
                            "git_ref") == TEST_GIT_REF
    assert plugin_value_get(plugins, "prebuild_plugins", "bump_release", "args",
                            "push_url") == push_url.format(username='example@',
                                                           component=TEST_COMPONENT)
    # import_image feeds the image stream named after name_label.
    assert get_plugin(plugins, "postbuild_plugins", "import_image")
    assert plugin_value_get(plugins,
                            "postbuild_plugins", "import_image", "args",
                            "imagestream") == name_label.replace('/', '-')
    # The repo passed to import_image has the URL scheme stripped.
    expected_repo = os.path.join(kwargs["registry_uri"], name_label)
    expected_repo = expected_repo.replace('https://', '')
    expected_repo = expected_repo.replace('http://', '')
    assert plugin_value_get(plugins,
                            "postbuild_plugins", "import_image", "args",
                            "docker_image_repo") == expected_repo
    assert plugin_value_get(plugins,
                            "postbuild_plugins", "import_image", "args",
                            "url") == kwargs["openshift_uri"]
    # insecure_registry is only set for http:// registries.
    if insecure_registry:
        assert plugin_value_get(plugins,
                                "postbuild_plugins", "import_image", "args",
                                "insecure_registry")
    else:
        with pytest.raises(KeyError):
            plugin_value_get(plugins,
                             "postbuild_plugins", "import_image", "args",
                             "insecure_registry")
    assert plugin_value_get(plugins, "postbuild_plugins", "tag_and_push", "args",
                            "registries", "registry.example.com") == {"insecure": True}
    assert get_plugin(plugins, "exit_plugins", "koji_promote")
    assert plugin_value_get(plugins, "exit_plugins", "koji_promote",
                            "args", "kojihub") == kwargs["kojihub"]
    assert plugin_value_get(plugins, "exit_plugins", "koji_promote",
                            "args", "url") == kwargs["openshift_uri"]
    with pytest.raises(KeyError):
        plugin_value_get(plugins, 'exit_plugins', 'koji_promote',
                         'args', 'metadata_only')  # v1 enabled by default
    # The sendmail plugin's pdc_secret_path must match the mount path
    # of the secret named pdc_secret_name.
    pdc_secret = [secret for secret in
                  build_json['spec']['strategy']['customStrategy']['secrets']
                  if secret['secretSource']['name'] == pdc_secret_name]
    mount_path = pdc_secret[0]['mountPath']
    expected = {'args': {'from_address': 'osbs@example.com',
                         'url': 'http://openshift/',
                         'pdc_url': 'https://pdc.example.com',
                         'pdc_secret_path': mount_path,
                         'send_on': ['auto_fail', 'auto_success'],
                         'error_addresses': ['errors@example.com'],
                         'smtp_uri': 'smtp.example.com',
                         'submitter': 'john-foo'},
                'name': 'sendmail'}
    assert get_plugin(plugins, 'exit_plugins', 'sendmail') == expected
@pytest.mark.parametrize('missing', [
    'git_branch',
    'git_push_url',
])
def test_render_prod_request_trigger_missing_param(self, tmpdir, missing):
    """
    If a parameter required for rebuild triggers is missing, the
    build must still render, but with triggers and every
    autorebuild-related plugin disabled.
    """
    self.create_image_change_trigger_json(str(tmpdir))
    bm = BuildManager(str(tmpdir))
    build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
    push_url = "ssh://{username}git.example.com/git/{component}.git"
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': 'fedora/resultingimage',
        'registry_uri': "registry.example.com",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'koji_target': "koji-target",
        'kojiroot': "http://root/",
        'kojihub': "http://hub/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
        'git_push_url': push_url.format(username='', component=TEST_COMPONENT),
        'git_push_username': 'example',
    }
    # Remove one of the parameters required for rebuild triggers
    del kwargs[missing]
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    # Verify the triggers are now disabled
    assert "triggers" not in build_json["spec"]
    strategy = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in strategy:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    # Fail with a clear message if DOCK_PLUGINS is missing, instead of
    # letting json.loads() raise TypeError on None (all sibling tests
    # make this check too).
    assert plugins_json is not None
    # Verify the rebuild plugins are all disabled
    plugins = json.loads(plugins_json)
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "check_and_set_rebuild")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins",
                   "stop_autorebuild_if_disabled")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "prebuild_plugins", "bump_release")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "postbuild_plugins", "import_image")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "koji_promote")
    with pytest.raises(NoSuchPluginException):
        get_plugin(plugins, "exit_plugins", "sendmail")
def test_render_prod_request_new_secrets(self, tmpdir):
    """
    Verify the two secret-mounting schemes: the default/0.5.4
    required version uses the old sourceSecret scheme, while 1.0.6
    uses the strategy 'secrets' array and passes the mount path to
    the pulp_push plugin as pulp_secret_path.
    """
    bm = BuildManager(INPUTS_PATH)
    secret_name = 'mysecret'
    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "john-foo",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': "fedora/resultingimage",
        'registry_uri': "registry.example.com",
        'openshift_uri': "http://openshift/",
        'builder_openshift_url': "http://openshift/",
        'sources_command': "make",
        'architecture': "x86_64",
        'vendor': "Foo Vendor",
        'build_host': "our.build.host.example.com",
        'authoritative_registry': "registry.example.com",
        'distribution_scope': "authoritative-source-only",
        'registry_api_versions': ['v1'],
        'pulp_registry': 'foo',
        'pulp_secret': secret_name,
    }
    # Default required version (0.5.4), implicitly and explicitly
    for required in (None, parse_version('0.5.4')):
        build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
        if required is not None:
            build_request.set_openshift_required_version(required)
        build_request.set_params(**kwargs)
        build_json = build_request.render()
        # Using the sourceSecret scheme
        assert 'sourceSecret' in build_json['spec']['source']
        assert build_json['spec']['source']['sourceSecret']['name'] == secret_name
        # Not using the secrets array scheme
        assert 'secrets' not in build_json['spec']['strategy']['customStrategy']
        # We shouldn't have pulp_secret_path set
        env = build_json['spec']['strategy']['customStrategy']['env']
        plugins_json = None
        for d in env:
            if d['name'] == 'DOCK_PLUGINS':
                plugins_json = d['value']
                break
        assert plugins_json is not None
        plugins = json.loads(plugins_json)
        assert 'pulp_secret_path' not in plugin_value_get(plugins,
                                                         'postbuild_plugins',
                                                         'pulp_push',
                                                         'args')
    # Set required version to 1.0.6
    build_request = bm.get_build_request_by_type(PROD_BUILD_TYPE)
    build_request.set_openshift_required_version(parse_version('1.0.6'))
    # Bug fix: the original never called set_params() on this fresh
    # build request before rendering, so the pulp secret it asserts
    # on below could not have been configured.
    build_request.set_params(**kwargs)
    build_json = build_request.render()
    # Not using the sourceSecret scheme
    assert 'sourceSecret' not in build_json['spec']['source']
    # Using the secrets array scheme instead
    assert 'secrets' in build_json['spec']['strategy']['customStrategy']
    secrets = build_json['spec']['strategy']['customStrategy']['secrets']
    pulp_secret = [secret for secret in secrets
                   if secret['secretSource']['name'] == secret_name]
    assert len(pulp_secret) > 0
    assert 'mountPath' in pulp_secret[0]
    # Check that the secret's mountPath matches the plugin's
    # configured path for the secret
    mount_path = pulp_secret[0]['mountPath']
    env = build_json['spec']['strategy']['customStrategy']['env']
    plugins_json = None
    for d in env:
        if d['name'] == 'DOCK_PLUGINS':
            plugins_json = d['value']
            break
    assert plugins_json is not None
    plugins = json.loads(plugins_json)
    assert plugin_value_get(plugins, 'postbuild_plugins', 'pulp_push',
                            'args', 'pulp_secret_path') == mount_path
| 44.733274
| 107
| 0.584872
| 5,155
| 50,146
| 5.407371
| 0.068671
| 0.025507
| 0.041327
| 0.074332
| 0.814206
| 0.775821
| 0.745507
| 0.717776
| 0.708951
| 0.680969
| 0
| 0.004642
| 0.295437
| 50,146
| 1,120
| 108
| 44.773214
| 0.784325
| 0.038328
| 0
| 0.707724
| 0
| 0
| 0.278124
| 0.057912
| 0
| 0
| 0
| 0
| 0.138831
| 1
| 0.018789
| false
| 0.001044
| 0.024008
| 0
| 0.048017
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4699eb9a64a85d66c07efe91802f6f8f46d7ba5d
| 32
|
py
|
Python
|
test.py
|
AksultanMukhanbet/proctoring_intellectual_part
|
f85db9d31025cb57a732f64ab22358651bc93c69
|
[
"MIT"
] | null | null | null |
test.py
|
AksultanMukhanbet/proctoring_intellectual_part
|
f85db9d31025cb57a732f64ab22358651bc93c69
|
[
"MIT"
] | null | null | null |
test.py
|
AksultanMukhanbet/proctoring_intellectual_part
|
f85db9d31025cb57a732f64ab22358651bc93c69
|
[
"MIT"
] | null | null | null |
print('3816560116110.mp4'[:-13])
| 32
| 32
| 0.71875
| 4
| 32
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0
| 32
| 1
| 32
| 32
| 0.21875
| 0
| 0
| 0
| 0
| 0
| 0.515152
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
315680f6b78c092b3932e67e0aa4a1ccf3e4bd0c
| 3,486
|
py
|
Python
|
hotel/migrations/0001_initial.py
|
sharmautkarsh31/hotel_power_management
|
777d8284f82969fbed80a4275239d0c6a2642bdf
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null |
hotel/migrations/0001_initial.py
|
sharmautkarsh31/hotel_power_management
|
777d8284f82969fbed80a4275239d0c6a2642bdf
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null |
hotel/migrations/0001_initial.py
|
sharmautkarsh31/hotel_power_management
|
777d8284f82969fbed80a4275239d0c6a2642bdf
|
[
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null |
# Generated by Django 3.2.3 on 2021-05-17 02:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the hotel power management app: Hotel, Floor,
    MainCorridor, SubCorridor, and the Light and AirConditioner devices."""

    # First migration for this app, so no dependencies.
    initial = True

    dependencies = [
    ]

    operations = [
        # Floor: gets its 'hotel' FK added below via AddField, because
        # Hotel is created after Floor in this operations list.
        migrations.CreateModel(
            name='Floor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Hotel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
            ],
        ),
        # Corridors belong to a floor; deleting the floor cascades.
        migrations.CreateModel(
            name='SubCorridor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('floor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.floor')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='MainCorridor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('floor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.floor')),
            ],
            options={
                'abstract': False,
            },
        ),
        # NOTE(review): Light and AirConditioner each require BOTH a
        # main_corridor and a sub_corridor FK (non-null, CASCADE);
        # presumably a device belongs to one OR the other — confirm
        # the model intent.
        migrations.CreateModel(
            name='Light',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('consumption_unit', models.IntegerField(default=5)),
                ('turned_on', models.BooleanField(default=True)),
                ('main_corridor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.maincorridor')),
                ('sub_corridor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.subcorridor')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Late FK: Floor -> Hotel (see comment on the Floor model above).
        migrations.AddField(
            model_name='floor',
            name='hotel',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.hotel'),
        ),
        migrations.CreateModel(
            name='AirConditioner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('consumption_unit', models.IntegerField(default=5)),
                ('turned_on', models.BooleanField(default=True)),
                ('main_corridor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.maincorridor')),
                ('sub_corridor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hotel.subcorridor')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 41.011765
| 123
| 0.557372
| 331
| 3,486
| 5.749245
| 0.196375
| 0.037835
| 0.058854
| 0.092486
| 0.834472
| 0.834472
| 0.834472
| 0.834472
| 0.834472
| 0.834472
| 0
| 0.014403
| 0.302926
| 3,486
| 84
| 124
| 41.5
| 0.768724
| 0.012909
| 0
| 0.714286
| 1
| 0
| 0.103228
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025974
| 0
| 0.077922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
319d435d92637d1947f6edccb2caa90ba2e50246
| 85,000
|
py
|
Python
|
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/modules/eos_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/modules/eos_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/arista/eos/plugins/modules/eos_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2020 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for eos_bgp_global
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: eos_bgp_global
short_description: Manages BGP global resource module
description: This module configures and manages the attributes of BGP global on Arista
EOS platforms.
version_added: 1.4.0
author: Gomathi Selvi Srinivasan (@GomathiselviS)
notes:
- Tested against Arista EOS 4.23.0F
- This module works with connection C(network_cli). See the L(EOS Platform Options,eos_platform_options).
options:
config:
description: A list of configurations for BGP global.
type: dict
suboptions:
as_number:
description: Autonomous system number.
type: str
aggregate_address:
description: Configure aggregate address.
type: list
elements: dict
suboptions:
address:
description: ipv4/ipv6 address prefix.
type: str
advertise_only:
description: Advertise without installing the generated blackhole route in
FIB.
type: bool
as_set:
description: Generate autonomous system set path information.
type: bool
attribute_map:
description: Name of the route map used to set the attribute of the
aggregate route.
type: str
match_map:
description: Name of the route map used to filter the contributors of the
aggregate route.
type: str
summary_only:
description: Filters all more-specific routes from updates.
type: bool
bgp_params:
description: BGP parameters.
type: dict
suboptions:
additional_paths:
description: BGP additional-paths commands
type: str
choices: ['install', 'send', 'receive']
advertise_inactive:
description: Advertise BGP routes even if they are inactive in RIB.
type: bool
allowas_in:
description: Allow local-as in updates.
type: dict
suboptions:
set:
description: When True, it is set.
type: bool
count:
description: Number of local ASNs allowed in a BGP update.
type: int
always_compare_med:
description: BGP Always Compare MED
type: bool
asn:
description: AS Number notation.
type: str
choices: ['asdot', 'asplain']
auto_local_addr:
description: Automatically determine the local address to be used
for the non-transport AF.
type: bool
bestpath:
description: Select the bestpath selection algorithm for BGP routes.
type: dict
suboptions:
as_path:
description: Select the bestpath selection based on as-path.
type: str
choices: ['ignore', 'multipath_relax']
ecmp_fast:
description: Tie-break BGP paths in a ECMP group based on the order of arrival.
type: bool
med:
description: MED attribute
type: dict
suboptions:
confed:
description: MED Confed.
type: bool
missing_as_worst:
description: MED missing-as-worst.
type: bool
skip:
description: skip one of the tie breaking rules in the bestpath selection.
type: bool
tie_break:
description: Configure the tie-break option for BGP bestpath selection.
choices: ['cluster_list_length', 'router_id']
type: str
client_to_client:
description: client to client configuration.
type: bool
cluster_id:
description: Cluster ID of this router acting as a route reflector.
type: str
confederation:
description: confederation.
type: dict
suboptions:
identifier:
description: Confederation identifier.
type: str
peers:
description: Confederation peers.
type: str
control_plan_filter:
description: Control plane filter for BGP.
type: bool
convergence:
description: BGP convergence parameters.
type: dict
suboptions:
slow_peer:
description: Maximum amount of time to wait for slow peers to establish session.
type: bool
time:
description: time in secs
type: int
default:
description: Default neighbor configuration commands.
type: str
choices: ['ipv4_unicast', 'ipv6_unicast']
enforce_first_as:
description: Enforce the First AS for EBGP routes(default).
type: bool
host_routes:
description: BGP host routes configuration.
type: bool
labeled_unicast:
description: Labeled Unicast.
type: str
choices: ['ip', 'tunnel']
listen:
description: BGP listen.
type: dict
suboptions:
limit:
description: Set limit on the number of dynamic BGP peers allowed.
type: int
range:
description: Subnet Range to be associated with the peer-group.
type: dict
suboptions:
address:
description: Address prefix
type: str
peer_group:
description: Name of peer group.
type: dict
suboptions:
name:
description: name.
type: str
peer_filter:
description: Name of peer filter.
type: str
remote_as:
description: Neighbor AS number
type: str
log_neighbor_changes:
description: Log neighbor up/down events.
type: bool
missing_policy:
description: Missing policy override configuration commands.
type: dict
suboptions:
direction:
description: Missing policy direction options.
type: str
choices: ['in', 'out']
action:
description: Missing policy action options.
type: str
choices: ['deny', 'permit', 'deny-in-out']
monitoring:
description: Enable Bgp monitoring for all/specified stations.
type: bool
next_hop_unchanged:
description: Preserve original nexthop while advertising routes to
eBGP peers.
type: bool
redistribute_internal:
description: Redistribute internal BGP routes.
type: bool
route:
description: Configure route-map for route installation.
type: str
route_reflector:
description: Configure route reflector options
type: dict
suboptions:
set:
description: When True route_reflector is set.
type: bool
preserve:
description: preserve route attributes, overwriting route-map changes
type: bool
transport:
description: Configure transport port for TCP session
type: int
default_metric:
description: Default metric.
type: int
distance:
description: Define an administrative distance.
type: dict
suboptions:
external:
description: distance for external routes.
type: int
internal:
description: distance for internal routes.
type: int
local:
description: distance for local routes.
type: int
graceful_restart:
description: Enable graceful restart mode.
type: dict
suboptions:
set:
description: When True, graceful restart is set.
type: bool
restart_time:
description: Set the max time needed to restart and come back up.
type: int
stalepath_time:
description: Set the max time to hold onto restarting peer stale paths.
type: int
graceful_restart_helper:
description: Enable graceful restart helper mode.
type: bool
access_group:
description: ip/ipv6 access list configuration.
type: dict
suboptions:
afi:
description: Specify ip/ipv6.
type: str
choices: ['ipv4', 'ipv6']
acl_name:
description: access list name.
type: str
direction:
description: direction of packets.
type: str
maximum_paths:
description: Maximum number of equal cost paths.
type: dict
suboptions:
max_equal_cost_paths:
description: Value for maximum number of equal cost paths.
type: int
max_installed_ecmp_paths:
description: Value for maximum number of installed ECMP routes.
type: int
monitoring:
description: BGP monitoring protocol configuration.
type: dict
suboptions:
port:
description: Configure the BGP monitoring protocol port number <1024-65535>.
type: int
received:
description: BGP monitoring protocol received route selection.
type: str
choices: ['post_policy', 'pre_policy']
station:
description: BGP monitoring station configuration.
type: str
timestamp:
description: BGP monitoring protocol Per-Peer Header timestamp behavior.
type: str
choices: ['none', 'send_time']
neighbor:
description: Configure routing for a network.
type: list
elements: dict
suboptions:
peer:
type: str
description: Neighbor address or peer-group.
additional_paths:
description: BGP additional-paths commands.
type: str
choices: ['send', 'receive']
allowas_in:
description: Allow local-as in updates.
type: dict
suboptions:
set:
description: When True, it is set.
type: bool
count:
description: Number of local ASNs allowed in a BGP update.
type: int
auto_local_addr:
description: Automatically determine the local address to be used
for the non-transport AF.
type: bool
default_originate:
description: Originate default route to this neighbor.
type: dict
suboptions:
route_map:
description: Route map reference.
type: str
always:
description: Always originate default route to this neighbor.
type: bool
description:
description: Text describing the neighbor.
type: str
dont_capability_negotiate:
description: Do not perform Capability Negotiation with this
neighbor.
type: bool
ebgp_multihop:
description: Allow BGP connections to indirectly connected
external peers.
type: dict
suboptions:
ttl:
description: Time-to-live in the range 1-255 hops.
type: int
set:
description: If True, ttl is not set.
type: bool
enforce_first_as:
description: Enforce the First AS for EBGP routes(default).
type: bool
export_localpref:
description: Override localpref when exporting to an internal
peer.
type: int
fall_over:
description: Configure BFD protocol options for this peer.
type: bool
graceful_restart:
description: Enable graceful restart mode.
type: bool
graceful_restart_helper:
description: Enable graceful restart helper mode.
type: bool
idle_restart_timer:
description: Neighbor idle restart timer.
type: int
import_localpref:
description: Override localpref when importing from an external
peer.
type: int
link_bandwidth:
description: Enable link bandwidth community for routes to this
peer.
type: dict
suboptions:
set:
description: If True, set link bandwidth
type: bool
auto:
description: Enable link bandwidth auto generation for routes from this peer.
type: bool
default:
description: Enable link bandwidth default generation for routes from this
peer.
type: str
update_delay:
description: Delay outbound route updates.
type: int
local_as:
description: Configure local AS number advertised to peer.
type: dict
suboptions:
as_number:
description: AS number.
type: str
fallback:
description: Prefer router AS Number over local AS Number.
type: bool
local_v6_addr:
description: The local IPv6 address of the neighbor in A:B:C:D:E:F:G:H format.
type: str
maximum_accepted_routes:
description: Maximum number of routes accepted from this peer.
type: dict
suboptions:
count:
description: Maximum number of accepted routes (0 means unlimited).
type: int
warning_limit:
description: Maximum number of accepted routes after which a warning is issued.
(0 means never warn)
type: int
maximum_received_routes:
description: Maximum number of routes received from this peer.
type: dict
suboptions:
count:
description: Maximum number of routes (0 means unlimited).
type: int
warning_limit:
description: Percentage of maximum-routes at which warning is to be issued.
type: dict
suboptions:
limit_count:
description: Number of routes at which to warn.
type: int
limit_percent:
description: Percentage of maximum number of routes at which to warn( 1-100).
type: int
warning_only:
description: Only warn, no restart, if max route limit exceeded.
type: bool
metric_out:
description: MED value to advertise to peer.
type: int
monitoring:
description: Enable BGP Monitoring Protocol for this peer.
type: bool
next_hop_self:
description: Always advertise this router address as the BGP
next hop
type: bool
next_hop_unchanged:
description: Preserve original nexthop while advertising routes to
eBGP peers.
type: bool
next_hop_v6_address:
description: IPv6 next-hop address for the neighbor
type: str
out_delay:
description: Delay outbound route updates.
type: int
encryption_password:
description: Password to use in computation of MD5 hash.
type: dict
suboptions:
type:
description: Encryption type.
type: int
choices: [0, 7]
password:
description: password (up to 80 chars).
type: str
remote_as:
description: Neighbor Autonomous System.
type: str
remove_private_as:
description: Remove private AS number from updates to this peer.
type: dict
suboptions:
set:
description: If True, set remove_private_as.
type: bool
all:
description: Remove private AS number.
type: bool
replace_as:
description: Replace private AS number with local AS number.
type: bool
peer_group:
description: Name of the peer-group.
type: str
prefix_list:
description: Prefix list reference.
type: dict
suboptions:
direction:
description: Configure an inbound/outbound prefix-list.
type: str
choices: ['in', 'out']
name:
description: prefix list name.
type: str
route_map:
description: Route map reference.
type: dict
suboptions:
direction:
description: Configure an inbound/outbound route-map.
type: str
choices: ['in', 'out']
name:
description: Route map name.
type: str
route_reflector_client:
description: Configure peer as a route reflector client.
type: bool
route_to_peer:
description: Use routing table information to reach the peer.
type: bool
send_community:
description: Send community attribute to this neighbor.
type: dict
suboptions:
community_attribute:
description: Type of community attributes to send to this neighbor.
type: str
sub_attribute:
description: Attribute to be sent to the neighbor.
type: str
choices: ['extended', 'link-bandwidth', 'standard']
link_bandwidth_attribute:
description: cumulative/aggregate attribute to be sent.
type: str
choices: ['aggregate', 'divide']
speed:
description: Reference link speed in bits/second
type: str
divide:
description: link-bandwidth divide attribute.
type: str
choices: ['equal', 'ratio']
shut_down:
description: Administratively shut down this neighbor.
type: bool
soft_recognition:
description: Configure how to handle routes that fail import.
type: str
choices: ['all', 'None']
timers:
description: Timers.
type: dict
suboptions:
keepalive:
description: Keep Alive Interval in secs.
type: int
holdtime:
description: Hold time in secs.
type: int
transport:
description: Configure transport options for TCP session.
type: dict
suboptions:
connection_mode:
description: Configure connection-mode for TCP session.
type: str
remote_port:
description: Configure BGP peer TCP port to connect to.
type: int
ttl:
description: BGP ttl security check
type: int
update_source:
description: Specify the local source interface for peer BGP
sessions.
type: str
weight:
description: Weight to assign.
type: int
network:
description: Configure routing for a network.
type: list
elements: dict
suboptions:
address:
description: address prefix.
type: str
route_map:
description: Name of route map.
type: str
redistribute:
description: Redistribute routes in to BGP.
type: list
elements: dict
suboptions:
protocol:
description: Routes to be redistributed.
type: str
choices: ['isis', 'ospf3', 'ospf', 'attached-host', 'connected', 'rip', 'static']
route_map:
description: Route map reference.
type: str
isis_level:
description: Applicable for isis routes. Specify isis route level.
type: str
choices: ['level-1', 'level-2', 'level-1-2']
ospf_route:
description: ospf route options.
type: str
choices: ['internal', 'external', 'nssa_external_1', 'nssa_external_2']
router_id:
description: Router id.
type: str
route_target:
description: Route target.
type: dict
suboptions:
action:
description: Route action.
type: str
choices: ['both', 'import', 'export']
target:
description: Route Target.
type: str
shutdown:
description: When True, shut down BGP.
type: bool
timers:
description: Timers.
type: dict
suboptions:
keepalive:
description: Keep Alive Interval in secs.
type: int
holdtime:
description: Hold time in secs.
type: int
ucmp:
description: Configure unequal cost multipathing.
type: dict
suboptions:
fec:
description: Configure UCMP fec utilization threshold.
type: dict
suboptions:
trigger:
description: UCMP fec utilization too high threshold.
type: int
clear:
description: UCMP FEC utilization Clear thresholds.
type: int
link_bandwidth:
description: Configure link-bandwidth propagation delay.
type: dict
suboptions:
mode:
description: UCMP link bandwidth mode
type: str
choices: ['encoding_weighted', 'recursive']
update_delay:
description: Link Bandwidth Advertisement delay.
type: int
mode:
description: UCMP mode.
type: dict
suboptions:
set:
description: If True, ucmp mode is set to 1.
type: bool
nexthops:
description: Value for total number UCMP nexthops.
type: int
update:
description: Configure BGP update generation.
type: dict
suboptions:
wait_for:
description: wait for options before converge or synchronize.
type: str
choices: ['wait_for_convergence', 'wait_install']
batch_size:
description: batch size for FIB route acknowledgements.
type: int
vlan:
description: Configure MAC VRF BGP for single VLAN support.
type: int
vlan_aware_bundle:
description: Configure MAC VRF BGP for multiple VLAN support.
type: str
vrfs:
description: Configure BGP in a VRF.
type: list
elements: dict
suboptions:
vrf:
description: VRF name.
type: str
aggregate_address:
description: Configure aggregate address.
type: list
elements: dict
suboptions:
address:
description: ipv4/ipv6 address prefix.
type: str
advertise_only:
description: Advertise without installing the generated blackhole route in
FIB.
type: bool
as_set:
description: Generate autonomous system set path information.
type: bool
attribute_map:
description: Name of the route map used to set the attribute of the
aggregate route.
type: str
match_map:
description: Name of the route map used to filter the contributors of the
aggregate route.
type: str
summary_only:
description: Filters all more-specific routes from updates.
type: bool
bgp_params:
description: BGP parameters.
type: dict
suboptions:
additional_paths:
description: BGP additional-paths commands
type: str
choices: ['install', 'send', 'receive']
advertise_inactive:
description: Advertise BGP routes even if they are inactive in RIB.
type: bool
allowas_in:
description: Allow local-as in updates.
type: dict
suboptions:
set:
description: When True, it is set.
type: bool
count:
description: Number of local ASNs allowed in a BGP update.
type: int
always_compare_med:
description: BGP Always Compare MED
type: bool
asn:
description: AS Number notation.
type: str
choices: ['asdot', 'asplain']
auto_local_addr:
description: Automatically determine the local address to be used
for the non-transport AF.
type: bool
bestpath:
description: Select the bestpath selection algorithm for BGP routes.
type: dict
suboptions:
as_path:
description: Select the bestpath selection based on as-path.
type: str
choices: ['ignore', 'multipath_relax']
ecmp_fast:
description: Tie-break BGP paths in a ECMP group based on the order of arrival.
type: bool
med:
description: MED attribute
type: dict
suboptions:
confed:
description: MED Confed.
type: bool
missing_as_worst:
description: MED missing-as-worst.
type: bool
skip:
description: skip one of the tie breaking rules in the bestpath selection.
type: bool
tie_break:
description: Configure the tie-break option for BGP bestpath selection.
choices: ['cluster_list_length', 'router_id']
type: str
client_to_client:
description: client to client configuration.
type: bool
cluster_id:
description: Cluster ID of this router acting as a route reflector.
type: str
confederation:
description: confederation.
type: dict
suboptions:
identifier:
description: Confederation identifier.
type: str
peers:
description: Confederation peers.
type: str
control_plane_filter:
description: Control plane filter for BGP.
type: bool
convergence:
description: BGP convergence parameters.
type: dict
suboptions:
slow_peer:
description: Maximum amount of time to wait for slow peers to establish session.
type: bool
time:
description: time in secs
type: int
default:
description: Default neighbor configuration commands.
type: str
choices: ['ipv4_unicast', 'ipv6_unicast']
enforce_first_as:
description: Enforce the First AS for EBGP routes(default).
type: bool
host_routes:
description: BGP host routes configuration.
type: bool
labeled_unicast:
description: Labeled Unicast.
type: str
choices: ['ip', 'tunnel']
listen:
description: BGP listen.
type: dict
suboptions:
limit:
description: Set limit on the number of dynamic BGP peers allowed.
type: int
range:
description: Subnet Range to be associated with the peer-group.
type: dict
suboptions:
address:
description: Address prefix
type: str
peer_group:
description: Name of peer group.
type: dict
suboptions:
name:
description: name.
type: str
peer_filter:
description: Name of peer filter.
type: str
remote_as:
description: Neighbor AS number
type: str
log_neighbor_changes:
description: Log neighbor up/down events.
type: bool
missing_policy:
description: Missing policy override configuration commands.
type: dict
suboptions:
direction:
description: Missing policy direction options.
type: str
choices: ['in', 'out']
action:
description: Missing policy action options.
type: str
choices: ['deny', 'permit', 'deny-in-out']
monitoring:
description: Enable Bgp monitoring for all/specified stations.
type: bool
next_hop_unchanged:
description: Preserve original nexthop while advertising routes to
eBGP peers.
type: bool
redistribute_internal:
description: Redistribute internal BGP routes.
type: bool
route:
description: Configure route-map for route installation.
type: str
route_reflector:
description: Configure route reflector options
type: dict
suboptions:
set:
description: When True route_reflector is set.
type: bool
preserve:
description: preserve route attributes, overwriting route-map changes
type: bool
transport:
description: Configure transport port for TCP session
type: int
default_metric:
description: Default metric.
type: int
distance:
description: Define an administrative distance.
type: dict
suboptions:
external:
description: distance for external routes.
type: int
internal:
description: distance for internal routes.
type: int
local:
description: distance for local routes.
type: int
graceful_restart:
description: Enable graceful restart mode.
type: dict
suboptions:
set:
description: When True, graceful restart is set.
type: bool
restart_time:
description: Set the max time needed to restart and come back up.
type: int
stalepath_time:
description: Set the max time to hold onto restarting peer stale paths.
type: int
graceful_restart_helper:
description: Enable graceful restart helper mode.
type: bool
access_group:
description: ip/ipv6 access list configuration.
type: dict
suboptions:
afi:
description: Specify ip/ipv6.
type: str
choices: ['ip', 'ipv6']
acl_name:
description: access list name.
type: str
direction:
description: direction of packets.
type: str
maximum_paths:
description: Maximum number of equal cost paths.
type: dict
suboptions:
max_equal_cost_paths:
description: Value for maximum number of equal cost paths.
type: int
max_installed_ecmp_paths:
description: Value for maximum number of installed ECMP routes.
type: int
neighbor:
description: Configure routing for a network.
type: list
elements: dict
suboptions:
peer:
type: str
description: Neighbor address or peer group.
additional_paths:
description: BGP additional-paths commands.
type: str
choices: ['send', 'receive']
allowas_in:
description: Allow local-as in updates.
type: dict
suboptions:
set:
description: When True, it is set.
type: bool
count:
description: Number of local ASNs allowed in a BGP update.
type: int
auto_local_addr:
description: Automatically determine the local address to be used
for the non-transport AF.
type: bool
default_originate:
description: Originate default route to this neighbor.
type: dict
suboptions:
route_map:
description: Route map reference.
type: str
always:
description: Always originate default route to this neighbor.
type: bool
description:
description: Text describing the neighbor.
type: str
dont_capability_negotiate:
description: Do not perform Capability Negotiation with this
neighbor.
type: bool
ebgp_multihop:
description: Allow BGP connections to indirectly connected
external peers.
type: dict
suboptions:
ttl:
description: Time-to-live in the range 1-255 hops.
type: int
set:
description: If True, ttl is not set.
type: bool
enforce_first_as:
description: Enforce the First AS for EBGP routes(default).
type: bool
export_localpref:
description: Override localpref when exporting to an internal
peer.
type: int
fall_over:
description: Configure BFD protocol options for this peer.
type: bool
graceful_restart:
description: Enable graceful restart mode.
type: bool
graceful_restart_helper:
description: Enable graceful restart helper mode.
type: bool
idle_restart_timer:
description: Neighbor idle restart timer.
type: int
import_localpref:
description: Override localpref when importing from an external
peer.
type: int
link_bandwidth:
description: Enable link bandwidth community for routes to this
peer.
type: dict
suboptions:
set:
description: If True, set link bandwidth
type: bool
auto:
description: Enable link bandwidth auto generation for routes from this peer.
type: bool
default:
description: Enable link bandwidth default generation for routes from this
peer.
type: str
update_delay:
description: Delay outbound route updates.
type: int
local_as:
description: Configure local AS number advertised to peer.
type: dict
suboptions:
as_number:
description: AS number.
type: str
fallback:
description: Prefer router AS Number over local AS Number.
type: bool
local_v6_addr:
description: The local IPv6 address of the neighbor in A:B:C:D:E:F:G:H format.
type: str
maximum_accepted_routes:
description: Maximum number of routes accepted from this peer.
type: dict
suboptions:
count:
description: Maximum number of accepted routes (0 means unlimited).
type: int
warning_limit:
description: Maximum number of accepted routes after which a warning is issued.
(0 means never warn)
type: int
maximum_received_routes:
description: Maximum number of routes received from this peer.
type: dict
suboptions:
count:
description: Maximum number of routes (0 means unlimited).
type: int
warning_limit:
description: Percentage of maximum-routes at which warning is to be issued.
type: dict
suboptions:
limit_count:
description: Number of routes at which to warn.
type: int
limit_percent:
description: Percentage of maximum number of routes at which to warn( 1-100).
type: int
warning_only:
description: Only warn, no restart, if max route limit exceeded.
type: bool
metric_out:
description: MED value to advertise to peer.
type: int
monitoring:
description: Enable BGP Monitoring Protocol for this peer.
type: bool
next_hop_self:
description: Always advertise this router address as the BGP
next hop
type: bool
next_hop_unchanged:
description: Preserve original nexthop while advertising routes to
eBGP peers.
type: bool
next_hop_v6_address:
description: IPv6 next-hop address for the neighbor
type: str
out_delay:
description: Delay outbound route updates.
type: int
encryption_password:
description: Password to use in computation of MD5 hash.
type: dict
suboptions:
type:
description: Encryption type.
type: int
choices: [0, 7]
password:
description: password (up to 80 chars).
type: str
remote_as:
description: Neighbor Autonomous System.
type: str
remove_private_as:
description: Remove private AS number from updates to this peer.
type: dict
suboptions:
set:
description: If True, set remove_private_as.
type: bool
all:
description: Remove private AS number.
type: bool
replace_as:
description: Replace private AS number with local AS number.
type: bool
peer_group:
description: Name of the peer-group.
type: str
prefix_list:
description: Prefix list reference.
type: dict
suboptions:
direction:
description: Configure an inbound/outbound prefix-list.
type: str
choices: ['in', 'out']
name:
description: prefix list name.
type: str
route_map:
description: Route map reference.
type: dict
suboptions:
direction:
description: Configure an inbound/outbound route-map.
type: str
choices: ['in', 'out']
name:
description: Route map name.
type: str
route_reflector_client:
description: Configure peer as a route reflector client.
type: bool
route_to_peer:
description: Use routing table information to reach the peer.
type: bool
send_community:
description: Send community attribute to this neighbor.
type: dict
suboptions:
community_attribute:
description: Type of community attributes to send to this neighbor.
type: str
sub_attribute:
description: Attribute to be sent to the neighbor.
type: str
choices: ['extended', 'link-bandwidth', 'standard']
link_bandwidth_attribute:
description: cumulative/aggregate attribute to be sent.
type: str
choices: ['aggregate', 'divide']
speed:
description: Reference link speed in bits/second
type: str
divide:
description: link-bandwidth divide attribute.
type: str
choices: ['equal', 'ratio']
shut_down:
description: Administratively shut down this neighbor.
type: bool
soft_recognition:
description: Configure how to handle routes that fail import.
type: str
choices: ['all', 'None']
timers:
description: Timers.
type: dict
suboptions:
keepalive:
description: Keep Alive Interval in secs.
type: int
holdtime:
description: Hold time in secs.
type: int
transport:
description: Configure transport options for TCP session.
type: dict
suboptions:
connection_mode:
description: Configure connection-mode for TCP session.
type: str
remote_port:
description: Configure BGP peer TCP port to connect to.
type: int
ttl:
description: BGP ttl security check
type: int
update_source:
description: Specify the local source interface for peer BGP
sessions.
type: str
weight:
description: Weight to assign.
type: int
network:
description: Configure routing for a network.
type: list
elements: dict
suboptions:
address:
description: address prefix.
type: str
route_map:
description: Name of route map.
type: str
redistribute:
description: Redistribute routes in to BGP.
type: list
elements: dict
suboptions:
protocol:
description: Routes to be redistributed.
type: str
choices: ['isis', 'ospf3', 'ospf', 'attached-host', 'connected', 'rip', 'static']
route_map:
description: Route map reference.
type: str
isis_level:
description: Applicable for isis routes. Specify isis route level.
type: str
choices: ['level-1', 'level-2', 'level-1-2']
ospf_route:
description: ospf route options.
type: str
choices: ['internal', 'external', 'nssa_external_1', 'nssa_external_2']
route_target:
description: Route target.
type: dict
suboptions:
action:
description: Route action.
type: str
choices: ['both', 'import', 'export']
target:
description: Route Target.
type: str
router_id:
description: Router id.
type: str
shutdown:
description: When True, shut down BGP.
type: bool
timers:
description: Timers.
type: dict
suboptions:
keepalive:
description: Keep Alive Interval in secs.
type: int
holdtime:
description: Hold time in secs.
type: int
ucmp:
description: Configure unequal cost multipathing.
type: dict
suboptions:
fec:
description: Configure UCMP fec utilization threshold.
type: dict
suboptions:
trigger:
description: UCMP fec utilization too high threshold.
type: int
clear:
description: UCMP FEC utilization Clear thresholds.
type: int
link_bandwidth:
description: Configure link-bandwidth propagation delay.
type: dict
suboptions:
mode:
description: UCMP link bandwidth mode
type: str
choices: ['encoding_weighted', 'recursive']
update_delay:
description: Link Bandwidth Advertisement delay.
type: int
mode:
description: UCMP mode.
type: dict
suboptions:
set:
description: If True, ucmp mode is set to 1.
type: bool
nexthops:
description: Value for total number UCMP nexthops.
type: int
update:
description: Configure BGP update generation.
type: dict
suboptions:
wait_for:
description: wait for options before converge or synchronize.
type: str
choices: ['wait_for_convergence', 'wait_install']
batch_size:
description: batch size for FIB route acknowledgements.
type: int
running_config:
description:
- This option is used only with state I(parsed).
- The value of this option should be the output received from the EOS device by
executing the command B(show running-config | section bgp).
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into Ansible structured data as per the resource module's argspec
and the value is then returned in the I(parsed) key within the result.
type: str
state:
description:
- The state the configuration should be left in.
- State I(purged) removes all the BGP configurations from the
target device. Use caution with this state.('no router bgp <x>')
- State I(deleted) only removes BGP attributes that this modules
manages and does not negate the BGP process completely. Thereby, preserving
address-family related configurations under BGP context.
- Running states I(deleted) and I(replaced) will result in an error if there
are address-family configuration lines present under vrf context that
is to be removed. Please use the M(arista.eos.eos_bgp_address_family)
module for prior cleanup.
- Refer to examples for more details.
type: str
choices: [deleted, merged, purged, replaced, gathered, rendered, parsed]
default: merged
"""
EXAMPLES = """
# Using merged
# Before state
# veos(config)#show running-config | section bgp
# veos(config)#
- name: Merge provided configuration with device configuration
arista.eos.eos_bgp_global:
config:
as_number: "100"
bgp_params:
host_routes: True
convergence:
slow_peer: True
time: 6
additional_paths: "send"
log_neighbor_changes: True
maximum_paths:
max_equal_cost_paths: 55
aggregate_address:
- address: "1.2.1.0/24"
as_set: true
match_map: "match01"
- address: "5.2.1.0/24"
attribute_map: "attrmatch01"
advertise_only: true
redistribute:
- protocol: "static"
route_map: "map_static"
- protocol: "attached-host"
distance:
internal: 50
neighbor:
- peer: "10.1.3.2"
allowas_in:
set: true
default_originate:
always: true
dont_capability_negotiate: true
export_localpref: 4000
maximum_received_routes:
count: 500
warning_limit:
limit_percent: 5
next_hop_unchanged: true
prefix_list:
name: "prefix01"
direction: "out"
- peer: "peer1"
fall_over: true
link_bandwidth:
update_delay: 5
monitoring: True
send_community:
community_attribute: "extended"
sub_attribute: "link-bandwidth"
link_bandwidth_attribute: "aggregate"
speed: "600"
vlan: 5
state: merged
# After State:
# veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# distance bgp 50 50 50
# maximum-paths 55
# bgp additional-paths send any
# neighbor peer1 peer-group
# neighbor peer1 link-bandwidth update-delay 5
# neighbor peer1 fall-over bfd
# neighbor peer1 monitoring
# neighbor peer1 send-community extended link-bandwidth aggregate 600
# neighbor peer1 maximum-routes 12000
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
# vlan 5
# !
# address-family ipv4
# neighbor 10.1.3.2 prefix-list prefix01 out
# veos(config)#
#
# Module Execution:
#
# "after": {
# "aggregate_address": [
# {
# "address": "1.2.1.0/24",
# "as_set": true,
# "match_map": "match01"
# },
# {
# "address": "5.2.1.0/24",
# "advertise_only": true,
# "attribute_map": "attrmatch01"
# }
# ],
# "as_number": "100",
# "bgp_params": {
# "additional_paths": "send",
# "convergence": {
# "slow_peer": true,
# "time": 6
# }
# },
# "distance": {
# "external": 50,
# "internal": 50,
# "local": 50
# },
# "maximum_paths": {
# "max_equal_cost_paths": 55
# },
# "neighbor": [
# {
# "fall_over": true,
# "link_bandwidth": {
# "set": true,
# "update_delay": 5
# },
# "maximum_received_routes": {
# "count": 12000
# },
# "monitoring": true,
# "peer": "peer1",
# "peer_group": "peer1",
# "send_community": {
# "community_attribute": "extended",
# "link_bandwidth_attribute": "aggregate",
# "speed": "600",
# "sub_attribute": "link-bandwidth"
# }
# },
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "export_localpref": 4000,
# "maximum_received_routes": {
# "count": 500,
# "warning_limit": {
# "limit_percent": 5
# }
# },
# "next_hop_unchanged": true,
# "peer": "10.1.3.2"
# }
# ],
# "redistribute": [
# {
# "protocol": "static",
# "route_map": "map_static"
# },
# {
# "protocol": "attached-host"
# }
# ],
# "vlan": 5
# },
# "before": {},
# "changed": true,
# "commands": [
# "router bgp 100",
# "neighbor 10.1.3.2 allowas-in",
# "neighbor 10.1.3.2 default-originate always",
# "neighbor 10.1.3.2 dont-capability-negotiate",
# "neighbor 10.1.3.2 export-localpref 4000",
# "neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent",
# "neighbor 10.1.3.2 next-hop-unchanged",
# "neighbor 10.1.3.2 prefix-list prefix01 out",
# "neighbor peer1 fall-over bfd",
# "neighbor peer1 link-bandwidth update-delay 5",
# "neighbor peer1 monitoring",
# "neighbor peer1 send-community extended link-bandwidth aggregate 600",
# "redistribute static route-map map_static",
# "redistribute attached-host",
# "aggregate-address 1.2.1.0/24 as-set match-map match01",
# "aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only",
# "bgp host-routes fib direct-install",
# "bgp convergence slow-peer time 6",
# "bgp additional-paths send any",
# "bgp log-neighbor-changes",
# "maximum-paths 55",
# "distance bgp 50",
# "vlan 5"
# ],
# Using replaced:
# Before state:
# veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# distance bgp 50 50 50
# maximum-paths 55
# bgp additional-paths send any
# neighbor peer1 peer-group
# neighbor peer1 link-bandwidth update-delay 5
# neighbor peer1 fall-over bfd
# neighbor peer1 monitoring
# neighbor peer1 send-community extended link-bandwidth aggregate 600
# neighbor peer1 maximum-routes 12000
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
# vlan 5
# !
# address-family ipv4
# neighbor 10.1.3.2 prefix-list prefix01 out
# !
# vrf vrf01
# route-target import 54:11
# neighbor 12.1.3.2 dont-capability-negotiate
# neighbor 12.1.3.2 allowas-in 3
# neighbor 12.1.3.2 default-originate always
# neighbor 12.1.3.2 maximum-routes 12000
# veos(config)#
- name: replace provided configuration with device configuration
arista.eos.eos_bgp_global:
config:
as_number: "100"
bgp_params:
host_routes: True
convergence:
slow_peer: True
time: 6
additional_paths: "send"
log_neighbor_changes: True
vrfs:
- vrf: "vrf01"
maximum_paths:
max_equal_cost_paths: 55
aggregate_address:
- address: "1.2.1.0/24"
as_set: true
match_map: "match01"
- address: "5.2.1.0/24"
attribute_map: "attrmatch01"
advertise_only: true
redistribute:
- protocol: "static"
route_map: "map_static"
- protocol: "attached-host"
distance:
internal: 50
neighbor:
- peer: "10.1.3.2"
allowas_in:
set: true
default_originate:
always: true
dont_capability_negotiate: true
export_localpref: 4000
maximum_received_routes:
count: 500
warning_limit:
limit_percent: 5
next_hop_unchanged: true
prefix_list:
name: "prefix01"
direction: "out"
- peer: "peer1"
fall_over: true
link_bandwidth:
update_delay: 5
monitoring: True
send_community:
community_attribute: "extended"
sub_attribute: "link-bandwidth"
link_bandwidth_attribute: "aggregate"
speed: "600"
state: replaced
# After State:
# veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# bgp additional-paths send any
# !
# vrf vrf01
# distance bgp 50 50 50
# maximum-paths 55
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
# address-family ipv4
# neighbor 10.1.3.2 prefix-list prefix01 out
# veos(config)#
#
#
# Module Execution:
#
# "after": {
# "as_number": "100",
# "bgp_params": {
# "additional_paths": "send",
# "convergence": {
# "slow_peer": true,
# "time": 6
# }
# },
# "vrfs": [
# {
# "aggregate_address": [
# {
# "address": "1.2.1.0/24",
# "as_set": true,
# "match_map": "match01"
# },
# {
# "address": "5.2.1.0/24",
# "advertise_only": true,
# "attribute_map": "attrmatch01"
# }
# ],
# "distance": {
# "external": 50,
# "internal": 50,
# "local": 50
# },
# "maximum_paths": {
# "max_equal_cost_paths": 55
# },
# "neighbor": [
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "export_localpref": 4000,
# "maximum_received_routes": {
# "count": 500,
# "warning_limit": {
# "limit_percent": 5
# }
# },
# "next_hop_unchanged": true,
# "peer": "10.1.3.2"
# }
# ],
# "redistribute": [
# {
# "protocol": "static",
# "route_map": "map_static"
# },
# {
# "protocol": "attached-host"
# }
# ],
# "vrf": "vrf01"
# }
# ]
# },
# "before": {
# "aggregate_address": [
# {
# "address": "1.2.1.0/24",
# "as_set": true,
# "match_map": "match01"
# },
# {
# "address": "5.2.1.0/24",
# "advertise_only": true,
# "attribute_map": "attrmatch01"
# }
# ],
# "as_number": "100",
# "bgp_params": {
# "additional_paths": "send",
# "convergence": {
# "slow_peer": true,
# "time": 6
# }
# },
# "distance": {
# "external": 50,
# "internal": 50,
# "local": 50
# },
# "maximum_paths": {
# "max_equal_cost_paths": 55
# },
# "neighbor": [
# {
# "fall_over": true,
# "link_bandwidth": {
# "set": true,
# "update_delay": 5
# },
# "maximum_received_routes": {
# "count": 12000
# },
# "monitoring": true,
# "peer": "peer1",
# "peer_group": "peer1",
# "send_community": {
# "community_attribute": "extended",
# "link_bandwidth_attribute": "aggregate",
# "speed": "600",
# "sub_attribute": "link-bandwidth"
# }
# },
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "export_localpref": 4000,
# "maximum_received_routes": {
# "count": 500,
# "warning_limit": {
# "limit_percent": 5
# }
# },
# "next_hop_unchanged": true,
# "peer": "10.1.3.2"
# }
# ],
# "redistribute": [
# {
# "protocol": "static",
# "route_map": "map_static"
# },
# {
# "protocol": "attached-host"
# }
# ],
# "vlan": 5,
# "vrfs": [
# {
# "neighbor": [
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "maximum_received_routes": {
# "count": 12000
# },
# "peer": "12.1.3.2"
# }
# ],
# "route_target": {
# "action": "import",
# "target": "54:11"
# },
# "vrf": "vrf01"
# }
# ]
# },
# "changed": true,
# "commands": [
# "router bgp 100",
# "vrf vrf01",
# "no route-target import 54:11",
# "neighbor 10.1.3.2 allowas-in",
# "neighbor 10.1.3.2 default-originate always",
# "neighbor 10.1.3.2 dont-capability-negotiate",
# "neighbor 10.1.3.2 export-localpref 4000",
# "neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent",
# "neighbor 10.1.3.2 next-hop-unchanged",
# "neighbor 10.1.3.2 prefix-list prefix01 out",
# "neighbor peer1 fall-over bfd",
# "neighbor peer1 link-bandwidth update-delay 5",
# "neighbor peer1 monitoring",
# "neighbor peer1 send-community extended link-bandwidth aggregate 600",
# "no neighbor 12.1.3.2",
# "redistribute static route-map map_static",
# "redistribute attached-host",
# "aggregate-address 1.2.1.0/24 as-set match-map match01",
# "aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only",
# "maximum-paths 55",
# "distance bgp 50",
# "exit",
# "no neighbor peer1 peer-group",
# "no neighbor peer1 link-bandwidth update-delay 5",
# "no neighbor peer1 fall-over bfd",
# "no neighbor peer1 monitoring",
# "no neighbor peer1 send-community extended link-bandwidth aggregate 600",
# "no neighbor peer1 maximum-routes 12000",
# "no neighbor 10.1.3.2",
# "no redistribute static route-map map_static",
# "no redistribute attached-host",
# "no aggregate-address 1.2.1.0/24 as-set match-map match01",
# "no aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only",
# "bgp host-routes fib direct-install",
# "bgp log-neighbor-changes",
# "no distance bgp 50 50 50",
# "no maximum-paths 55",
# "no vlan 5"
# ],
#
# Using replaced (in presence of address_family under vrf):
# Before State:
#veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# bgp additional-paths send any
# !
# vrf vrf01
# distance bgp 50 50 50
# maximum-paths 55
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
# address-family ipv4
# neighbor 10.1.3.2 prefix-list prefix01 out
# !
# address-family ipv6
# redistribute dhcp
# veos(config)#
- name: Replace
arista.eos.eos_bgp_global:
config:
as_number: "100"
graceful_restart:
set: True
router_id: "1.1.1.1"
timers:
keepalive: 2
holdtime: 5
ucmp:
mode:
set: True
vlan_aware_bundle: "bundle1 bundle2 bundle3"
state: replaced
# Module Execution:
# fatal: [192.168.122.113]: FAILED! => {
# "changed": false,
# "invocation": {
# "module_args": {
# "config": {
# "access_group": null,
# "aggregate_address": null,
# "as_number": "100",
# "bgp_params": null,
# "default_metric": null,
# "distance": null,
# "graceful_restart": {
# "restart_time": null,
# "set": true,
# "stalepath_time": null
# },
# "graceful_restart_helper": null,
# "maximum_paths": null,
# "monitoring": null,
# "neighbor": null,
# "network": null,
# "redistribute": null,
# "route_target": null,
# "router_id": "1.1.1.1",
# "shutdown": null,
# "timers": {
# "holdtime": 5,
# "keepalive": 2
# },
# "ucmp": {
# "fec": null,
# "link_bandwidth": null,
# "mode": {
# "nexthops": null,
# "set": true
# }
# },
# "update": null,
# "vlan": null,
# "vlan_aware_bundle": "bundle1 bundle2 bundle3",
# "vrfs": null
# },
# "running_config": null,
# "state": "replaced"
# }
# },
# "msg": "Use the _bgp_af module to delete the address_family under vrf, before replacing/deleting the vrf."
# }
# Using deleted:
# Before state:
# veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# bgp additional-paths send any
# !
# vrf vrf01
# distance bgp 50 50 50
# maximum-paths 55
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
- name: Delete configuration
arista.eos.eos_bgp_global:
config:
as_number: "100"
state: deleted
# After State:
# veos(config)#show running-config | section bgp
# router bgp 100
#
#
# Module Execution:
#
# "after": {
# "as_number": "100"
# },
# "before": {
# "as_number": "100",
# "bgp_params": {
# "additional_paths": "send",
# "convergence": {
# "slow_peer": true,
# "time": 6
# }
# },
# "vrfs": [
# {
# "aggregate_address": [
# {
# "address": "1.2.1.0/24",
# "as_set": true,
# "match_map": "match01"
# },
# {
# "address": "5.2.1.0/24",
# "advertise_only": true,
# "attribute_map": "attrmatch01"
# }
# ],
# "distance": {
# "external": 50,
# "internal": 50,
# "local": 50
# },
# "maximum_paths": {
# "max_equal_cost_paths": 55
# },
# "neighbor": [
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "export_localpref": 4000,
# "maximum_received_routes": {
# "count": 500,
# "warning_limit": {
# "limit_percent": 5
# }
# },
# "next_hop_unchanged": true,
# "peer": "10.1.3.2"
# }
# ],
# "redistribute": [
# {
# "protocol": "static",
# "route_map": "map_static"
# },
# {
# "protocol": "attached-host"
# }
# ],
# "vrf": "vrf01"
# }
# ]
# },
# "changed": true,
# "commands": [
# "router bgp 100",
# "no vrf vrf01",
# "no bgp convergence slow-peer time 6",
# "no bgp additional-paths send any"
# ],
#
# Using purged:
# Before state:
# veos(config)#show running-config | section bgp
# router bgp 100
# bgp convergence slow-peer time 6
# distance bgp 50 50 50
# maximum-paths 55
# bgp additional-paths send any
# neighbor peer1 peer-group
# neighbor peer1 link-bandwidth update-delay 5
# neighbor peer1 fall-over bfd
# neighbor peer1 monitoring
# neighbor peer1 send-community extended link-bandwidth aggregate 600
# neighbor peer1 maximum-routes 12000
# neighbor 10.1.3.2 export-localpref 4000
# neighbor 10.1.3.2 next-hop-unchanged
# neighbor 10.1.3.2 dont-capability-negotiate
# neighbor 10.1.3.2 allowas-in 3
# neighbor 10.1.3.2 default-originate always
# neighbor 10.1.3.2 maximum-routes 500 warning-limit 5 percent
# aggregate-address 1.2.1.0/24 as-set match-map match01
# aggregate-address 5.2.1.0/24 attribute-map attrmatch01 advertise-only
# redistribute static route-map map_static
# redistribute attached-host
# !
# vlan 5
# !
# address-family ipv4
# neighbor 10.1.3.2 prefix-list prefix01 out
# !
# vrf vrf01
# route-target import 54:11
# neighbor 12.1.3.2 dont-capability-negotiate
# neighbor 12.1.3.2 allowas-in 3
# neighbor 12.1.3.2 default-originate always
# neighbor 12.1.3.2 maximum-routes 12000
# veos(config)#
- name: Purge configuration
arista.eos.eos_bgp_global:
config:
as_number: "100"
state: purged
# After State:
# veos(config)#show running-config | section bgp
# veos(config)#
# Module Execution:
# "after": {},
# "before": {
# "aggregate_address": [
# {
# "address": "1.2.1.0/24",
# "as_set": true,
# "match_map": "match01"
# },
# {
# "address": "5.2.1.0/24",
# "advertise_only": true,
# "attribute_map": "attrmatch01"
# }
# ],
# "as_number": "100",
# "bgp_params": {
# "additional_paths": "send",
# "convergence": {
# "slow_peer": true,
# "time": 6
# }
# },
# "distance": {
# "external": 50,
# "internal": 50,
# "local": 50
# },
# "maximum_paths": {
# "max_equal_cost_paths": 55
# },
# "neighbor": [
# {
# "fall_over": true,
# "link_bandwidth": {
# "set": true,
# "update_delay": 5
# },
# "maximum_received_routes": {
# "count": 12000
# },
# "monitoring": true,
# "peer": "peer1",
# "peer_group": "peer1",
# "send_community": {
# "community_attribute": "extended",
# "link_bandwidth_attribute": "aggregate",
# "speed": "600",
# "sub_attribute": "link-bandwidth"
# }
# },
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "export_localpref": 4000,
# "maximum_received_routes": {
# "count": 500,
# "warning_limit": {
# "limit_percent": 5
# }
# },
# "next_hop_unchanged": true,
# "peer": "10.1.3.2"
# }
# ],
# "redistribute": [
# {
# "protocol": "static",
# "route_map": "map_static"
# },
# {
# "protocol": "attached-host"
# }
# ],
# "vlan": 5,
# "vrfs": [
# {
# "neighbor": [
# {
# "allowas_in": {
# "count": 3
# },
# "default_originate": {
# "always": true
# },
# "dont_capability_negotiate": true,
# "maximum_received_routes": {
# "count": 12000
# },
# "peer": "12.1.3.2"
# }
# ],
# "route_target": {
# "action": "import",
# "target": "54:11"
# },
# "vrf": "vrf01"
# }
# ]
# },
# "changed": true,
# "commands": [
# "no router bgp 100"
# ],
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.arista.eos.plugins.module_utils.network.eos.argspec.bgp_global.bgp_global import (
Bgp_globalArgs,
)
from ansible_collections.arista.eos.plugins.module_utils.network.eos.config.bgp_global.bgp_global import (
Bgp_global,
)
def main():
    """Entry point for the eos_bgp_global Ansible module.

    Builds the AnsibleModule from the resource-module argspec, delegates
    all work to the Bgp_global config handler, and exits the process
    with the handler's result dict.

    :returns: does not return; terminates via ``module.exit_json``
    """
    ansible_module = AnsibleModule(
        supports_check_mode=False,
        argument_spec=Bgp_globalArgs.argument_spec,
        mutually_exclusive=[],
        required_if=[],
    )
    ansible_module.exit_json(**Bgp_global(ansible_module).execute_module())
if __name__ == "__main__":
main()
| 36.748811
| 112
| 0.466482
| 7,372
| 85,000
| 5.290016
| 0.08044
| 0.02154
| 0.035079
| 0.008077
| 0.907277
| 0.899482
| 0.887943
| 0.885404
| 0.884302
| 0.883045
| 0
| 0.028425
| 0.462353
| 85,000
| 2,312
| 113
| 36.764706
| 0.824923
| 0.007094
| 0
| 0.920125
| 0
| 0.004909
| 0.990833
| 0.026741
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000446
| false
| 0.00357
| 0.007586
| 0
| 0.008032
| 0.000446
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31be07d35715c7deb5c19213badc51362e1b27e3
| 689
|
py
|
Python
|
vhoster/cli/services.py
|
GerardBalaoro/VHoster
|
991a10f30308103d7b187c8d5dba636f0e14b669
|
[
"MIT"
] | 2
|
2020-10-30T12:02:21.000Z
|
2020-12-11T23:42:12.000Z
|
vhoster/cli/services.py
|
GerardBalaoro/VHoster
|
991a10f30308103d7b187c8d5dba636f0e14b669
|
[
"MIT"
] | null | null | null |
vhoster/cli/services.py
|
GerardBalaoro/VHoster
|
991a10f30308103d7b187c8d5dba636f0e14b669
|
[
"MIT"
] | null | null | null |
"""Services CLI Commands"""
from .core import *
@main.command()
@click.argument('services', nargs=-1, required=False)
@pass_state
def start(state, services):
    """Start all or specified services"""
    # Fixed help text: the original docstring said "Restart", apparently
    # copy-pasted; this is the `start` command.
    # True when the service was named on the command line, or when no
    # filter was given at all (start everything).
    def selected(service):
        return not services or service in services
    if selected('apache'):
        # Apache is started via restart() — presumably safe/idempotent
        # when the server is not running; TODO confirm against
        # state.server's implementation.
        state.server.restart()
    # NOTE(review): stop() below also handles 'ngrok', but there is no
    # ngrok branch here — confirm that is intentional.
@main.command()
@click.argument('services', nargs=-1, required=False)
@pass_state
def stop(state, services):
    """Stop all or specified services"""
    # A service matches when it was named on the command line, or when
    # no filter was supplied at all (stop everything).
    def selected(name):
        if not services:
            return True
        return name in services

    if selected('apache'):
        state.server.stop()
    if selected('ngrok'):
        state.ngrok.stop()
| 20.264706
| 53
| 0.657475
| 84
| 689
| 5.369048
| 0.380952
| 0.066519
| 0.070953
| 0.10643
| 0.736142
| 0.736142
| 0.736142
| 0.736142
| 0.736142
| 0.736142
| 0
| 0.003663
| 0.207547
| 689
| 33
| 54
| 20.878788
| 0.822344
| 0.124819
| 0
| 0.631579
| 0
| 0
| 0.056218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0.105263
| 0.052632
| 0.105263
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 8
|
31d1df784c0bf97eac7f1a5b10c83bb05286f04f
| 6,440
|
py
|
Python
|
tests/test_state.py
|
gmr/rejected
|
564542d38072161f74a0ee9b4b61e38a52e2681d
|
[
"BSD-3-Clause"
] | 26
|
2015-06-16T10:54:03.000Z
|
2022-02-09T13:25:40.000Z
|
tests/test_state.py
|
gmr/rejected
|
564542d38072161f74a0ee9b4b61e38a52e2681d
|
[
"BSD-3-Clause"
] | 26
|
2016-04-25T20:10:13.000Z
|
2020-11-23T18:01:01.000Z
|
tests/test_state.py
|
gmr/rejected
|
564542d38072161f74a0ee9b4b61e38a52e2681d
|
[
"BSD-3-Clause"
] | 13
|
2015-04-30T21:08:11.000Z
|
2021-11-17T05:52:43.000Z
|
"""Tests for the State Class"""
import unittest
try:
from unittest import mock
except ImportError:
import mock
from rejected import state
class TestState(unittest.TestCase):
    """Tests for state.State: transitions, descriptions and predicates."""

    def setUp(self):
        self._obj = state.State()

    # ------------------------------------------------------------------
    # helpers
    # ------------------------------------------------------------------

    def _force_state(self, value):
        """Assign the state attribute directly, bypassing set_state()."""
        self._obj.state = value

    def _assert_description(self, value):
        """state_description must map the state through State.STATES."""
        self._force_state(value)
        self.assertEqual(self._obj.state_description,
                         self._obj.STATES[value])

    def _assert_flag(self, value, flag, expected):
        """Boolean property *flag* must be truthy/falsy per *expected*."""
        self._force_state(value)
        result = getattr(self._obj, flag)
        if expected:
            self.assertTrue(result)
        else:
            self.assertFalse(result)

    # ------------------------------------------------------------------
    # set_state behaviour
    # ------------------------------------------------------------------

    def test_set_state_invalid_value(self):
        self.assertRaises(ValueError, self._obj.set_state, 9999)

    def test_set_state_expected_assignment(self):
        # Fixed: the original assigned to self.state (an attribute on the
        # TestCase itself), a no-op; the intent is to start the object
        # under test from a known state.
        self._obj.state = self._obj.STATE_IDLE
        self._obj.set_state(self._obj.STATE_CONNECTING)
        self.assertEqual(self._obj.state, self._obj.STATE_CONNECTING)

    def test_set_state_state_start(self):
        # Same self.state -> self._obj.state fix as above.
        self._obj.state = self._obj.STATE_IDLE
        frozen_time = 86400
        # Freeze time.time() so state_start has a deterministic value.
        with mock.patch('time.time', return_value=frozen_time):
            self._obj.set_state(self._obj.STATE_CONNECTING)
        self.assertEqual(self._obj.state_start, frozen_time)

    # ------------------------------------------------------------------
    # state_description
    # ------------------------------------------------------------------

    def test_state_initializing_desc(self):
        self._assert_description(self._obj.STATE_INITIALIZING)

    def test_state_connecting_desc(self):
        self._assert_description(self._obj.STATE_CONNECTING)

    def test_state_idle_desc(self):
        self._assert_description(self._obj.STATE_IDLE)

    def test_state_active_desc(self):
        self._assert_description(self._obj.STATE_ACTIVE)

    def test_state_stop_requested_desc(self):
        self._assert_description(self._obj.STATE_STOP_REQUESTED)

    def test_state_shutting_down_desc(self):
        self._assert_description(self._obj.STATE_SHUTTING_DOWN)

    def test_state_stopped_desc(self):
        self._assert_description(self._obj.STATE_STOPPED)

    # ------------------------------------------------------------------
    # is_idle: true only in STATE_IDLE
    # ------------------------------------------------------------------

    def test_is_idle_state_initializing(self):
        self._assert_flag(self._obj.STATE_INITIALIZING, 'is_idle', False)

    def test_is_idle_state_connecting(self):
        self._assert_flag(self._obj.STATE_CONNECTING, 'is_idle', False)

    def test_is_idle_state_idle(self):
        self._assert_flag(self._obj.STATE_IDLE, 'is_idle', True)

    def test_is_idle_state_processing(self):
        self._assert_flag(self._obj.STATE_ACTIVE, 'is_idle', False)

    def test_is_idle_state_stop_requested(self):
        self._assert_flag(self._obj.STATE_STOP_REQUESTED, 'is_idle', False)

    def test_is_idle_state_shutting_down(self):
        self._assert_flag(self._obj.STATE_SHUTTING_DOWN, 'is_idle', False)

    def test_is_idle_state_stopped(self):
        self._assert_flag(self._obj.STATE_STOPPED, 'is_idle', False)

    # ------------------------------------------------------------------
    # is_running: true in STATE_IDLE and STATE_ACTIVE
    # ------------------------------------------------------------------

    def test_is_running_state_initializing(self):
        self._assert_flag(self._obj.STATE_INITIALIZING, 'is_running', False)

    def test_is_running_state_connecting(self):
        self._assert_flag(self._obj.STATE_CONNECTING, 'is_running', False)

    def test_is_running_state_idle(self):
        self._assert_flag(self._obj.STATE_IDLE, 'is_running', True)

    def test_is_running_state_processing(self):
        self._assert_flag(self._obj.STATE_ACTIVE, 'is_running', True)

    def test_is_running_state_stop_requested(self):
        self._assert_flag(self._obj.STATE_STOP_REQUESTED, 'is_running', False)

    def test_is_running_state_shutting_down(self):
        self._assert_flag(self._obj.STATE_SHUTTING_DOWN, 'is_running', False)

    def test_is_running_state_stopped(self):
        self._assert_flag(self._obj.STATE_STOPPED, 'is_running', False)

    # ------------------------------------------------------------------
    # is_shutting_down: true only in STATE_SHUTTING_DOWN
    # ------------------------------------------------------------------

    def test_is_shutting_down_state_initializing(self):
        self._assert_flag(self._obj.STATE_INITIALIZING,
                          'is_shutting_down', False)

    def test_is_shutting_down_state_connecting(self):
        self._assert_flag(self._obj.STATE_CONNECTING,
                          'is_shutting_down', False)

    def test_is_shutting_down_state_idle(self):
        self._assert_flag(self._obj.STATE_IDLE, 'is_shutting_down', False)

    def test_is_shutting_down_state_processing(self):
        self._assert_flag(self._obj.STATE_ACTIVE, 'is_shutting_down', False)

    def test_is_shutting_down_state_stop_requested(self):
        self._assert_flag(self._obj.STATE_STOP_REQUESTED,
                          'is_shutting_down', False)

    def test_is_shutting_down_state_shutting_down(self):
        self._assert_flag(self._obj.STATE_SHUTTING_DOWN,
                          'is_shutting_down', True)

    def test_is_shutting_down_state_stopped(self):
        self._assert_flag(self._obj.STATE_STOPPED, 'is_shutting_down', False)

    # ------------------------------------------------------------------
    # is_stopped: true only in STATE_STOPPED
    # ------------------------------------------------------------------

    def test_is_stopped_state_initializing(self):
        self._assert_flag(self._obj.STATE_INITIALIZING, 'is_stopped', False)

    def test_is_stopped_state_connecting(self):
        self._assert_flag(self._obj.STATE_CONNECTING, 'is_stopped', False)

    def test_is_stopped_state_idle(self):
        self._assert_flag(self._obj.STATE_IDLE, 'is_stopped', False)

    def test_is_stopped_state_processing(self):
        self._assert_flag(self._obj.STATE_ACTIVE, 'is_stopped', False)

    def test_is_stopped_state_stop_requested(self):
        self._assert_flag(self._obj.STATE_STOP_REQUESTED, 'is_stopped', False)

    def test_is_stopped_state_shutting_down(self):
        self._assert_flag(self._obj.STATE_SHUTTING_DOWN, 'is_stopped', False)

    def test_is_stopped_state_stopped(self):
        # Added: the stopped/stopped case was missing from the matrix.
        # NOTE(review): assumes is_stopped is True in STATE_STOPPED,
        # consistent with the other predicates — confirm against State.
        self._assert_flag(self._obj.STATE_STOPPED, 'is_stopped', True)
| 37.225434
| 74
| 0.713665
| 872
| 6,440
| 4.824541
| 0.059633
| 0.211314
| 0.256715
| 0.157594
| 0.875683
| 0.860946
| 0.850725
| 0.831709
| 0.825053
| 0.777989
| 0
| 0.001742
| 0.197671
| 6,440
| 172
| 75
| 37.44186
| 0.812464
| 0.003882
| 0
| 0.538462
| 0
| 0
| 0.001404
| 0
| 0
| 0
| 0
| 0
| 0.284615
| 1
| 0.292308
| false
| 0
| 0.038462
| 0
| 0.338462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9edf96fb95922c1020fb5f5e9cd59a9d45ac9843
| 33,907
|
py
|
Python
|
train.py
|
taeyen/graph-generation
|
70787c77205bc02e90fa587c22a64706cb975892
|
[
"MIT"
] | null | null | null |
train.py
|
taeyen/graph-generation
|
70787c77205bc02e90fa587c22a64706cb975892
|
[
"MIT"
] | null | null | null |
train.py
|
taeyen/graph-generation
|
70787c77205bc02e90fa587c22a64706cb975892
|
[
"MIT"
] | null | null | null |
import networkx as nx
import numpy as np
import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Variable
import matplotlib.pyplot as plt
import torch.nn.functional as F
from torch import optim
from torch.optim.lr_scheduler import MultiStepLR
from sklearn.decomposition import PCA
import logging
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence
from time import gmtime, strftime
from sklearn.metrics import roc_curve
from sklearn.metrics import roc_auc_score
from sklearn.metrics import average_precision_score
from random import shuffle
import pickle
from tensorboard_logger import configure, log_value
import scipy.misc
import time as tm
from utils import *
from model import *
from data import *
from args import Args
import create_graphs
def train_vae_epoch(epoch, args, rnn, output, data_loader,
                    optimizer_rnn, optimizer_output,
                    scheduler_rnn, scheduler_output):
    """Train the GraphRNN-VAE (graph-level RNN + VAE output head) for one epoch.

    Args:
        epoch: current epoch number, used for periodic console logging.
        args: experiment configuration (Args instance).
        rnn: graph-level RNN producing hidden states from adjacency rows.
        output: VAE head returning edge logits and latent gaussian
            parameters (mu, log sigma^2).
        data_loader: yields dicts with 'x', 'y' (padded adjacency-row
            sequences) and 'len' (true sequence lengths).
        optimizer_rnn, optimizer_output: optimizers for the two modules.
        scheduler_rnn, scheduler_output: LR schedulers, stepped per batch.

    Returns:
        Mean total loss (BCE + KL) over the epoch's batches.
    """
    rnn.train()
    output.train()
    loss_sum = 0
    for batch_idx, data in enumerate(data_loader):
        rnn.zero_grad()
        output.zero_grad()
        x_unsorted = data['x'].float()
        y_unsorted = data['y'].float()
        y_len_unsorted = data['len']
        # Trim padding to the longest sequence in this batch.
        y_len_max = max(y_len_unsorted)
        x_unsorted = x_unsorted[:, 0:y_len_max, :]
        y_unsorted = y_unsorted[:, 0:y_len_max, :]
        # initialize lstm hidden state according to batch size
        rnn.hidden = rnn.init_hidden(batch_size=x_unsorted.size(0))
        # Sort by descending length, as required by pack_padded_sequence.
        y_len, sort_index = torch.sort(y_len_unsorted, 0, descending=True)
        y_len = y_len.numpy().tolist()
        x = torch.index_select(x_unsorted, 0, sort_index)
        y = torch.index_select(y_unsorted, 0, sort_index)
        x = Variable(x).cuda()
        y = Variable(y).cuda()
        # if using ground truth to train
        h = rnn(x, pack=True, input_len=y_len)
        y_pred, z_mu, z_lsgms = output(h)
        # Fixed: torch.sigmoid replaces deprecated F.sigmoid.
        y_pred = torch.sigmoid(y_pred)
        # Pack/unpack round-trip zeroes positions past each true length.
        y_pred = pack_padded_sequence(y_pred, y_len, batch_first=True)
        y_pred = pad_packed_sequence(y_pred, batch_first=True)[0]
        z_mu = pack_padded_sequence(z_mu, y_len, batch_first=True)
        z_mu = pad_packed_sequence(z_mu, batch_first=True)[0]
        z_lsgms = pack_padded_sequence(z_lsgms, y_len, batch_first=True)
        z_lsgms = pad_packed_sequence(z_lsgms, batch_first=True)[0]
        # Reconstruction loss + KL divergence of the latent gaussian.
        loss_bce = binary_cross_entropy_weight(y_pred, y)
        loss_kl = -0.5 * torch.sum(1 + z_lsgms - z_mu.pow(2) - z_lsgms.exp())
        loss_kl /= y.size(0) * y.size(1) * sum(y_len)  # normalize
        loss = loss_bce + loss_kl
        loss.backward()
        # update deterministic and lstm
        optimizer_output.step()
        optimizer_rnn.step()
        # NOTE(review): schedulers step once per *batch*, so MultiStepLR
        # milestones are in batch units here — confirm this is intended.
        scheduler_output.step()
        scheduler_rnn.step()
        # Latent statistics for monitoring, extracted as Python floats.
        z_mu_mean = torch.mean(z_mu.data).item()
        z_sgm_mean = torch.mean(z_lsgms.mul(0.5).exp_().data).item()
        z_mu_min = torch.min(z_mu.data).item()
        z_sgm_min = torch.min(z_lsgms.mul(0.5).exp_().data).item()
        z_mu_max = torch.max(z_mu.data).item()
        z_sgm_max = torch.max(z_lsgms.mul(0.5).exp_().data).item()
        if epoch % args.epochs_log == 0 and batch_idx == 0:  # only output first batch's statistics
            # Fixed: loss_bce.data[0] / loss_kl.data[0] is the pre-0.4
            # PyTorch API and raises on 0-dim tensors in torch >= 0.4
            # (this function already used loss.item() below); use .item().
            print('Epoch: {}/{}, train bce loss: {:.6f}, train kl loss: {:.6f}, graph type: {}, num_layer: {}, hidden: {}'.format(
                epoch, args.epochs, loss_bce.item(), loss_kl.item(), args.graph_type, args.num_layers, args.hidden_size_rnn))
            print('z_mu_mean', z_mu_mean, 'z_mu_min', z_mu_min, 'z_mu_max', z_mu_max, 'z_sgm_mean', z_sgm_mean, 'z_sgm_min', z_sgm_min, 'z_sgm_max', z_sgm_max)
        # logging
        step = epoch * args.batch_ratio + batch_idx
        log_value('bce_loss_' + args.fname, loss_bce.item(), step)
        log_value('kl_loss_' + args.fname, loss_kl.item(), step)
        log_value('z_mu_mean_' + args.fname, z_mu_mean, step)
        log_value('z_mu_min_' + args.fname, z_mu_min, step)
        log_value('z_mu_max_' + args.fname, z_mu_max, step)
        log_value('z_sgm_mean_' + args.fname, z_sgm_mean, step)
        log_value('z_sgm_min_' + args.fname, z_sgm_min, step)
        log_value('z_sgm_max_' + args.fname, z_sgm_max, step)
        loss_sum += loss.item()
    return loss_sum / (batch_idx + 1)
def test_vae_epoch(epoch, args, rnn, output, test_batch_size=16, save_histogram=False, sample_time = 1):
    """Sample graphs from a trained GraphRNN-VAE model.

    Autoregressively generates `test_batch_size` graphs with up to
    ``args.max_num_node`` nodes: each step feeds the previously sampled
    adjacency row back into the node-level RNN.

    Returns:
        list of graphs decoded from the sampled adjacency rows
        (one per element of the test batch).

    NOTE(review): requires CUDA; `save_histogram` is currently unused
    (the histogram-saving code below is commented out); `epoch` is unused.
    """
    rnn.hidden = rnn.init_hidden(test_batch_size)
    rnn.eval()
    output.eval()
    # generate graphs
    max_num_node = int(args.max_num_node)
    y_pred = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # normalized prediction score
    y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
    x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
    for i in range(max_num_node):
        h = rnn(x_step)
        # VAE decoder also returns (mu, log-sigma); both ignored at test time
        y_pred_step, _, _ = output(h)
        y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
        x_step = sample_sigmoid(y_pred_step, sample=True, sample_time=sample_time)
        y_pred_long[:, i:i + 1, :] = x_step
        # detach the hidden state so no autograd history accumulates across steps
        rnn.hidden = Variable(rnn.hidden.data).cuda()
    y_pred_data = y_pred.data
    y_pred_long_data = y_pred_long.data.long()
    # save graphs as pickle
    G_pred_list = []
    for i in range(test_batch_size):
        adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
        G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
        G_pred_list.append(G_pred)
    # save prediction histograms, plot histogram over each time step
    # if save_histogram:
    #     save_prediction_histogram(y_pred_data.cpu().numpy(),
    #                               fname_pred=args.figure_prediction_save_path+args.fname_pred+str(epoch)+'.jpg',
    #                               max_num_node=max_num_node)
    return G_pred_list
def test_vae_partial_epoch(epoch, args, rnn, output, data_loader, save_histogram=False,sample_time=1):
    """Graph completion with GraphRNN-VAE over a dataset of partial graphs.

    For each batch, adjacency rows of the observed prefix are taken from the
    ground truth ``y`` (via ``sample_sigmoid_supervised``) while the remaining
    rows are sampled from the model.

    Returns:
        list of completed graphs, accumulated over all batches.

    Fix vs. original: removed the unused local ``y_pred_data``.
    NOTE(review): requires CUDA; `save_histogram` and `epoch` are unused.
    """
    rnn.eval()
    output.eval()
    G_pred_list = []
    for batch_idx, data in enumerate(data_loader):
        x = data['x'].float()
        y = data['y'].float()
        y_len = data['len']
        test_batch_size = x.size(0)
        rnn.hidden = rnn.init_hidden(test_batch_size)
        # generate graphs
        max_num_node = int(args.max_num_node)
        y_pred = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # normalized prediction score
        y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
        x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
        for i in range(max_num_node):
            print('finish node',i)
            h = rnn(x_step)
            # VAE decoder also returns (mu, log-sigma); ignored here
            y_pred_step, _, _ = output(h)
            y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
            # supervised sampling: copy ground truth while i is inside y_len, sample otherwise
            x_step = sample_sigmoid_supervised(y_pred_step, y[:,i:i+1,:].cuda(), current=i, y_len=y_len, sample_time=sample_time)
            y_pred_long[:, i:i + 1, :] = x_step
            # detach hidden state between steps
            rnn.hidden = Variable(rnn.hidden.data).cuda()
        y_pred_long_data = y_pred_long.data.long()
        # save graphs as pickle
        for i in range(test_batch_size):
            adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
            G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
            G_pred_list.append(G_pred)
    return G_pred_list
def train_mlp_epoch(epoch, args, rnn, output,
                    data_loader,
                    optimizer_rnn, optimizer_output,
                    scheduler_rnn, scheduler_output):
    """Train GraphRNN-MLP for one epoch.

    Sorts each batch by sequence length (required by pack_padded_sequence),
    runs the node-level RNN with packed input, predicts all adjacency rows
    with the MLP head, and optimizes a weighted BCE loss.

    Returns:
        mean per-batch loss over the epoch.

    NOTE(review): the LR schedulers are stepped once per *batch*, so
    ``args.milestones`` are effectively counted in batches — confirm intended.
    Requires CUDA.
    """
    rnn.train()
    output.train()
    loss_sum = 0
    for batch_idx, data in enumerate(data_loader):
        rnn.zero_grad()
        output.zero_grad()
        x_unsorted = data['x'].float()
        y_unsorted = data['y'].float()
        y_len_unsorted = data['len']
        # trim padding beyond the longest sequence in this batch
        y_len_max = max(y_len_unsorted)
        x_unsorted = x_unsorted[:, 0:y_len_max, :]
        y_unsorted = y_unsorted[:, 0:y_len_max, :]
        # initialize lstm hidden state according to batch size
        rnn.hidden = rnn.init_hidden(batch_size=x_unsorted.size(0))
        # sort input by descending length (pack_padded_sequence requirement)
        y_len,sort_index = torch.sort(y_len_unsorted,0,descending=True)
        y_len = y_len.numpy().tolist()
        x = torch.index_select(x_unsorted,0,sort_index)
        y = torch.index_select(y_unsorted,0,sort_index)
        x = Variable(x).cuda()
        y = Variable(y).cuda()
        h = rnn(x, pack=True, input_len=y_len)
        y_pred = output(h)
        y_pred = F.sigmoid(y_pred)
        # clean: zero out predictions past each sequence's true length
        y_pred = pack_padded_sequence(y_pred, y_len, batch_first=True)
        y_pred = pad_packed_sequence(y_pred, batch_first=True)[0]
        # use cross entropy loss
        loss = binary_cross_entropy_weight(y_pred, y)
        loss.backward()
        # update deterministic and lstm
        optimizer_output.step()
        optimizer_rnn.step()
        scheduler_output.step()
        scheduler_rnn.step()
        if epoch % args.epochs_log==0 and batch_idx==0: # only output first batch's statistics
            print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, num_layer: {}, hidden: {}'.format(
                epoch, args.epochs,loss.item(), args.graph_type, args.num_layers, args.hidden_size_rnn))
        # logging
        log_value('loss_'+args.fname, loss.item(), epoch*args.batch_ratio+batch_idx)
        loss_sum += loss.item()
    return loss_sum/(batch_idx+1)
def test_mlp_epoch(epoch, args, rnn, output, test_batch_size=16, save_histogram=False,sample_time=1):
    """Sample graphs from a trained GraphRNN-MLP model (inference only).

    Generates `test_batch_size` graphs autoregressively: each node's
    adjacency row is sampled from the sigmoid output of the MLP head and
    fed back as the next RNN input.

    Returns a list of graphs decoded from the sampled adjacency rows.
    Requires CUDA. `save_histogram` is unused (code below is commented out).
    """
    rnn.hidden = rnn.init_hidden(test_batch_size)
    rnn.eval()
    output.eval()

    num_nodes = int(args.max_num_node)
    # `scores` keeps the sigmoid probabilities, `samples` the binarized draws
    scores = Variable(torch.zeros(test_batch_size, num_nodes, args.max_prev_node)).cuda()
    samples = Variable(torch.zeros(test_batch_size, num_nodes, args.max_prev_node)).cuda()
    step_input = Variable(torch.ones(test_batch_size, 1, args.max_prev_node)).cuda()
    for node_idx in range(num_nodes):
        hidden = rnn(step_input)
        logits = output(hidden)
        scores[:, node_idx:node_idx + 1, :] = F.sigmoid(logits)
        step_input = sample_sigmoid(logits, sample=True, sample_time=sample_time)
        samples[:, node_idx:node_idx + 1, :] = step_input
        # keep hidden values but drop the autograd graph between steps
        rnn.hidden = Variable(rnn.hidden.data).cuda()

    score_data = scores.data
    sampled_rows = samples.data.long()
    graphs = []
    for b in range(test_batch_size):
        adj = decode_adj(sampled_rows[b].cpu().numpy())
        graphs.append(get_graph(adj))  # build graph from zero-padded adjacency
    # # save prediction histograms, plot histogram over each time step
    # if save_histogram:
    #     save_prediction_histogram(score_data.cpu().numpy(),
    #                               fname_pred=args.figure_prediction_save_path+args.fname_pred+str(epoch)+'.jpg',
    #                               max_num_node=num_nodes)
    return graphs
def test_mlp_partial_epoch(epoch, args, rnn, output, data_loader, save_histogram=False,sample_time=1):
    """Graph completion with GraphRNN-MLP over a dataset of partial graphs.

    Adjacency rows inside the observed prefix are copied from the ground
    truth ``y`` (via ``sample_sigmoid_supervised``); the rest are sampled
    from the model.

    Returns:
        list of completed graphs, accumulated over all batches.

    Fix vs. original: removed the unused local ``y_pred_data``.
    NOTE(review): requires CUDA; `save_histogram` and `epoch` are unused.
    """
    rnn.eval()
    output.eval()
    G_pred_list = []
    for batch_idx, data in enumerate(data_loader):
        x = data['x'].float()
        y = data['y'].float()
        y_len = data['len']
        test_batch_size = x.size(0)
        rnn.hidden = rnn.init_hidden(test_batch_size)
        # generate graphs
        max_num_node = int(args.max_num_node)
        y_pred = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # normalized prediction score
        y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
        x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
        for i in range(max_num_node):
            print('finish node',i)
            h = rnn(x_step)
            y_pred_step = output(h)
            y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
            # supervised sampling: copy ground truth while i is inside y_len, sample otherwise
            x_step = sample_sigmoid_supervised(y_pred_step, y[:,i:i+1,:].cuda(), current=i, y_len=y_len, sample_time=sample_time)
            y_pred_long[:, i:i + 1, :] = x_step
            # detach hidden state between steps
            rnn.hidden = Variable(rnn.hidden.data).cuda()
        y_pred_long_data = y_pred_long.data.long()
        # save graphs as pickle
        for i in range(test_batch_size):
            adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
            G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
            G_pred_list.append(G_pred)
    return G_pred_list
def test_mlp_partial_simple_epoch(epoch, args, rnn, output, data_loader, save_histogram=False,sample_time=1):
    """Graph completion with GraphRNN-MLP, using the *simple* supervised sampler.

    Identical to ``test_mlp_partial_epoch`` except that sampling uses
    ``sample_sigmoid_supervised_simple`` instead of
    ``sample_sigmoid_supervised``.

    Returns:
        list of completed graphs, accumulated over all batches.

    NOTE(review): requires CUDA; `save_histogram` and `epoch` are unused.
    """
    rnn.eval()
    output.eval()
    G_pred_list = []
    for batch_idx, data in enumerate(data_loader):
        x = data['x'].float()
        y = data['y'].float()
        y_len = data['len']
        test_batch_size = x.size(0)
        rnn.hidden = rnn.init_hidden(test_batch_size)
        # generate graphs
        max_num_node = int(args.max_num_node)
        y_pred = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # normalized prediction score
        y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
        x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
        for i in range(max_num_node):
            print('finish node',i)
            h = rnn(x_step)
            y_pred_step = output(h)
            y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
            # simple supervised sampling: ground truth inside the observed prefix
            x_step = sample_sigmoid_supervised_simple(y_pred_step, y[:,i:i+1,:].cuda(), current=i, y_len=y_len, sample_time=sample_time)
            y_pred_long[:, i:i + 1, :] = x_step
            # detach hidden state between steps
            rnn.hidden = Variable(rnn.hidden.data).cuda()
    y_pred_data = y_pred.data
    y_pred_long_data = y_pred_long.data.long()
        # save graphs as pickle
    for i in range(test_batch_size):
            adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
            G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
            G_pred_list.append(G_pred)
    return G_pred_list
def train_mlp_forward_epoch(epoch, args, rnn, output, data_loader):
    """Forward-only pass of GraphRNN-MLP used for NLL evaluation.

    Computes the (length-weighted) BCE over a dataset without calling
    ``backward`` or stepping any optimizer; per-row losses are scaled by
    the number of valid entries ``end_idx``.

    Returns:
        mean per-batch accumulated loss.

    NOTE(review): despite the name, no parameters are updated. It still
    calls ``rnn.train()``/``output.train()``, so dropout/batch-norm run in
    training mode — confirm this is intended for NLL estimation.
    Requires CUDA.
    """
    rnn.train()
    output.train()
    loss_sum = 0
    for batch_idx, data in enumerate(data_loader):
        rnn.zero_grad()
        output.zero_grad()
        x_unsorted = data['x'].float()
        y_unsorted = data['y'].float()
        y_len_unsorted = data['len']
        # trim padding beyond the longest sequence in this batch
        y_len_max = max(y_len_unsorted)
        x_unsorted = x_unsorted[:, 0:y_len_max, :]
        y_unsorted = y_unsorted[:, 0:y_len_max, :]
        # initialize lstm hidden state according to batch size
        rnn.hidden = rnn.init_hidden(batch_size=x_unsorted.size(0))
        # sort input by descending length (pack_padded_sequence requirement)
        y_len,sort_index = torch.sort(y_len_unsorted,0,descending=True)
        y_len = y_len.numpy().tolist()
        x = torch.index_select(x_unsorted,0,sort_index)
        y = torch.index_select(y_unsorted,0,sort_index)
        x = Variable(x).cuda()
        y = Variable(y).cuda()
        h = rnn(x, pack=True, input_len=y_len)
        y_pred = output(h)
        y_pred = F.sigmoid(y_pred)
        # clean: zero out predictions past each sequence's true length
        y_pred = pack_padded_sequence(y_pred, y_len, batch_first=True)
        y_pred = pad_packed_sequence(y_pred, batch_first=True)[0]
        # use cross entropy loss
        loss = 0
        for j in range(y.size(1)):
            # print('y_pred',y_pred[0,j,:],'y',y[0,j,:])
            # row j has at most j+1 valid previous-node entries (capped at width)
            end_idx = min(j+1,y.size(2))
            loss += binary_cross_entropy_weight(y_pred[:,j,0:end_idx], y[:,j,0:end_idx])*end_idx
        if epoch % args.epochs_log==0 and batch_idx==0: # only output first batch's statistics
            print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, num_layer: {}, hidden: {}'.format(
                epoch, args.epochs,loss.item(), args.graph_type, args.num_layers, args.hidden_size_rnn))
        # logging
        log_value('loss_'+args.fname, loss.item(), epoch*args.batch_ratio+batch_idx)
        loss_sum += loss.item()
    return loss_sum/(batch_idx+1)
## too complicated, deprecated
# def test_mlp_partial_bfs_epoch(epoch, args, rnn, output, data_loader, save_histogram=False,sample_time=1):
# rnn.eval()
# output.eval()
# G_pred_list = []
# for batch_idx, data in enumerate(data_loader):
# x = data['x'].float()
# y = data['y'].float()
# y_len = data['len']
# test_batch_size = x.size(0)
# rnn.hidden = rnn.init_hidden(test_batch_size)
# # generate graphs
# max_num_node = int(args.max_num_node)
# y_pred = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # normalized prediction score
# y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
# x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
# for i in range(max_num_node):
# # 1 back up hidden state
# hidden_prev = Variable(rnn.hidden.data).cuda()
# h = rnn(x_step)
# y_pred_step = output(h)
# y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
# x_step = sample_sigmoid_supervised(y_pred_step, y[:,i:i+1,:].cuda(), current=i, y_len=y_len, sample_time=sample_time)
# y_pred_long[:, i:i + 1, :] = x_step
#
# rnn.hidden = Variable(rnn.hidden.data).cuda()
#
# print('finish node', i)
# y_pred_data = y_pred.data
# y_pred_long_data = y_pred_long.data.long()
#
# # save graphs as pickle
# for i in range(test_batch_size):
# adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
# G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
# G_pred_list.append(G_pred)
# return G_pred_list
def train_rnn_epoch(epoch, args, rnn, output,
                    data_loader,
                    optimizer_rnn, optimizer_output,
                    scheduler_rnn, scheduler_output):
    """Train GraphRNN-RNN (node-level RNN + edge-level RNN) for one epoch.

    The edge-level ``output`` RNN is trained on the packed, *reversed*
    representation of the adjacency rows: packing flattens the batch so
    each adjacency row becomes one edge-sequence, and reversing puts those
    sequences in descending-length order as required by
    ``pack_padded_sequence``.

    Returns:
        mean per-batch (loss * feature_dim) over the epoch.

    NOTE(review): LR schedulers step once per batch (milestones counted in
    batches). Requires CUDA.
    """
    rnn.train()
    output.train()
    loss_sum = 0
    for batch_idx, data in enumerate(data_loader):
        rnn.zero_grad()
        output.zero_grad()
        x_unsorted = data['x'].float()
        y_unsorted = data['y'].float()
        y_len_unsorted = data['len']
        # trim padding beyond the longest sequence in this batch
        y_len_max = max(y_len_unsorted)
        x_unsorted = x_unsorted[:, 0:y_len_max, :]
        y_unsorted = y_unsorted[:, 0:y_len_max, :]
        # initialize lstm hidden state according to batch size
        rnn.hidden = rnn.init_hidden(batch_size=x_unsorted.size(0))
        # output.hidden = output.init_hidden(batch_size=x_unsorted.size(0)*x_unsorted.size(1))
        # sort input by descending length (pack_padded_sequence requirement)
        y_len,sort_index = torch.sort(y_len_unsorted,0,descending=True)
        y_len = y_len.numpy().tolist()
        x = torch.index_select(x_unsorted,0,sort_index)
        y = torch.index_select(y_unsorted,0,sort_index)
        # input, output for output rnn module
        # a smart use of pytorch builtin function: pack variable--b1_l1,b2_l1,...,b1_l2,b2_l2,...
        y_reshape = pack_padded_sequence(y,y_len,batch_first=True).data
        # reverse y_reshape, so that their lengths are sorted, add dimension
        idx = [i for i in range(y_reshape.size(0)-1, -1, -1)]
        idx = torch.LongTensor(idx)
        y_reshape = y_reshape.index_select(0, idx)
        y_reshape = y_reshape.view(y_reshape.size(0),y_reshape.size(1),1)
        # edge RNN input: a leading 1 (start token) followed by the shifted targets
        output_x = torch.cat((torch.ones(y_reshape.size(0),1,1),y_reshape[:,0:-1,0:1]),dim=1)
        output_y = y_reshape
        # batch size for output module: sum(y_len)
        output_y_len = []
        output_y_len_bin = np.bincount(np.array(y_len))
        for i in range(len(output_y_len_bin)-1,0,-1):
            count_temp = np.sum(output_y_len_bin[i:]) # count how many y_len is above i
            output_y_len.extend([min(i,y.size(2))]*count_temp) # put them in output_y_len; max value should not exceed y.size(2)
        # pack into variable
        x = Variable(x).cuda()
        y = Variable(y).cuda()
        output_x = Variable(output_x).cuda()
        output_y = Variable(output_y).cuda()
        # print(output_y_len)
        # print('len',len(output_y_len))
        # print('y',y.size())
        # print('output_y',output_y.size())
        # if using ground truth to train
        h = rnn(x, pack=True, input_len=y_len)
        h = pack_padded_sequence(h,y_len,batch_first=True).data # get packed hidden vector
        # reverse h so its order matches the reversed edge targets above
        idx = [i for i in range(h.size(0) - 1, -1, -1)]
        idx = Variable(torch.LongTensor(idx)).cuda()
        h = h.index_select(0, idx)
        # node-level hidden seeds layer 0 of the edge RNN; other layers start at zero
        hidden_null = Variable(torch.zeros(args.num_layers-1, h.size(0), h.size(1))).cuda()
        output.hidden = torch.cat((h.view(1,h.size(0),h.size(1)),hidden_null),dim=0) # num_layers, batch_size, hidden_size
        y_pred = output(output_x, pack=True, input_len=output_y_len)
        y_pred = F.sigmoid(y_pred)
        # clean: zero out predictions past each edge-sequence's true length
        y_pred = pack_padded_sequence(y_pred, output_y_len, batch_first=True)
        y_pred = pad_packed_sequence(y_pred, batch_first=True)[0]
        output_y = pack_padded_sequence(output_y,output_y_len,batch_first=True)
        output_y = pad_packed_sequence(output_y,batch_first=True)[0]
        # use cross entropy loss
        loss = binary_cross_entropy_weight(y_pred, output_y)
        loss.backward()
        # update deterministic and lstm
        optimizer_output.step()
        optimizer_rnn.step()
        scheduler_output.step()
        scheduler_rnn.step()
        if epoch % args.epochs_log==0 and batch_idx==0: # only output first batch's statistics
            print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, num_layer: {}, hidden: {}'.format(
                epoch, args.epochs,loss.item(), args.graph_type, args.num_layers, args.hidden_size_rnn))
        # logging
        log_value('loss_'+args.fname, loss.item(), epoch*args.batch_ratio+batch_idx)
        feature_dim = y.size(1)*y.size(2)
        loss_sum += loss.item()*feature_dim
    return loss_sum/(batch_idx+1)
def test_rnn_epoch(epoch, args, rnn, output, test_batch_size=16):
    """Sample graphs from a trained GraphRNN-RNN model.

    For each node step, the node-level RNN's output seeds layer 0 of the
    edge-level RNN, which then samples the adjacency row one edge at a time.

    Returns:
        list of sampled graphs (one per test-batch element).

    NOTE(review): requires CUDA; `epoch` is unused.
    """
    rnn.hidden = rnn.init_hidden(test_batch_size)
    rnn.eval()
    output.eval()
    # generate graphs
    max_num_node = int(args.max_num_node)
    y_pred_long = Variable(torch.zeros(test_batch_size, max_num_node, args.max_prev_node)).cuda() # discrete prediction
    x_step = Variable(torch.ones(test_batch_size,1,args.max_prev_node)).cuda()
    for i in range(max_num_node):
        h = rnn(x_step)
        # output.hidden = h.permute(1,0,2)
        # node hidden seeds layer 0 of the edge RNN; other layers start at zero
        hidden_null = Variable(torch.zeros(args.num_layers - 1, h.size(0), h.size(2))).cuda()
        output.hidden = torch.cat((h.permute(1,0,2), hidden_null),
                                  dim=0) # num_layers, batch_size, hidden_size
        x_step = Variable(torch.zeros(test_batch_size,1,args.max_prev_node)).cuda()
        output_x_step = Variable(torch.ones(test_batch_size,1,1)).cuda()
        # node i can connect to at most min(max_prev_node, i+1) previous nodes
        for j in range(min(args.max_prev_node,i+1)):
            output_y_pred_step = output(output_x_step)
            output_x_step = sample_sigmoid(output_y_pred_step, sample=True, sample_time=1)
            x_step[:,:,j:j+1] = output_x_step
            # detach edge-RNN hidden state between edge steps
            output.hidden = Variable(output.hidden.data).cuda()
        y_pred_long[:, i:i + 1, :] = x_step
        # detach node-RNN hidden state between node steps
        rnn.hidden = Variable(rnn.hidden.data).cuda()
    y_pred_long_data = y_pred_long.data.long()
    # save graphs as pickle
    G_pred_list = []
    for i in range(test_batch_size):
        adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
        G_pred = get_graph(adj_pred) # get a graph from zero-padded adj
        G_pred_list.append(G_pred)
    return G_pred_list
def train_rnn_forward_epoch(epoch, args, rnn, output, data_loader):
    """Forward-only pass of GraphRNN-RNN used for NLL evaluation.

    Mirrors ``train_rnn_epoch``'s packed/reversed edge-sequence setup but
    never calls ``backward`` or steps an optimizer.

    Returns:
        mean per-batch (loss * feature_dim / batch_size).

    NOTE(review): despite the name, no parameters are updated; modules are
    still put in train mode — confirm intended. Requires CUDA.
    """
    rnn.train()
    output.train()
    loss_sum = 0
    for batch_idx, data in enumerate(data_loader):
        rnn.zero_grad()
        output.zero_grad()
        x_unsorted = data['x'].float()
        y_unsorted = data['y'].float()
        y_len_unsorted = data['len']
        # trim padding beyond the longest sequence in this batch
        y_len_max = max(y_len_unsorted)
        x_unsorted = x_unsorted[:, 0:y_len_max, :]
        y_unsorted = y_unsorted[:, 0:y_len_max, :]
        # initialize lstm hidden state according to batch size
        rnn.hidden = rnn.init_hidden(batch_size=x_unsorted.size(0))
        # output.hidden = output.init_hidden(batch_size=x_unsorted.size(0)*x_unsorted.size(1))
        # sort input by descending length (pack_padded_sequence requirement)
        y_len,sort_index = torch.sort(y_len_unsorted,0,descending=True)
        y_len = y_len.numpy().tolist()
        x = torch.index_select(x_unsorted,0,sort_index)
        y = torch.index_select(y_unsorted,0,sort_index)
        # input, output for output rnn module
        # a smart use of pytorch builtin function: pack variable--b1_l1,b2_l1,...,b1_l2,b2_l2,...
        y_reshape = pack_padded_sequence(y,y_len,batch_first=True).data
        # reverse y_reshape, so that their lengths are sorted, add dimension
        idx = [i for i in range(y_reshape.size(0)-1, -1, -1)]
        idx = torch.LongTensor(idx)
        y_reshape = y_reshape.index_select(0, idx)
        y_reshape = y_reshape.view(y_reshape.size(0),y_reshape.size(1),1)
        # edge RNN input: a leading 1 (start token) followed by the shifted targets
        output_x = torch.cat((torch.ones(y_reshape.size(0),1,1),y_reshape[:,0:-1,0:1]),dim=1)
        output_y = y_reshape
        # batch size for output module: sum(y_len)
        output_y_len = []
        output_y_len_bin = np.bincount(np.array(y_len))
        for i in range(len(output_y_len_bin)-1,0,-1):
            count_temp = np.sum(output_y_len_bin[i:]) # count how many y_len is above i
            output_y_len.extend([min(i,y.size(2))]*count_temp) # put them in output_y_len; max value should not exceed y.size(2)
        # pack into variable
        x = Variable(x).cuda()
        y = Variable(y).cuda()
        output_x = Variable(output_x).cuda()
        output_y = Variable(output_y).cuda()
        # print(output_y_len)
        # print('len',len(output_y_len))
        # print('y',y.size())
        # print('output_y',output_y.size())
        # if using ground truth to train
        h = rnn(x, pack=True, input_len=y_len)
        h = pack_padded_sequence(h,y_len,batch_first=True).data # get packed hidden vector
        # reverse h so its order matches the reversed edge targets above
        idx = [i for i in range(h.size(0) - 1, -1, -1)]
        idx = Variable(torch.LongTensor(idx)).cuda()
        h = h.index_select(0, idx)
        # node-level hidden seeds layer 0 of the edge RNN; other layers start at zero
        hidden_null = Variable(torch.zeros(args.num_layers-1, h.size(0), h.size(1))).cuda()
        output.hidden = torch.cat((h.view(1,h.size(0),h.size(1)),hidden_null),dim=0) # num_layers, batch_size, hidden_size
        y_pred = output(output_x, pack=True, input_len=output_y_len)
        y_pred = F.sigmoid(y_pred)
        # clean: zero out predictions past each edge-sequence's true length
        y_pred = pack_padded_sequence(y_pred, output_y_len, batch_first=True)
        y_pred = pad_packed_sequence(y_pred, batch_first=True)[0]
        output_y = pack_padded_sequence(output_y,output_y_len,batch_first=True)
        output_y = pad_packed_sequence(output_y,batch_first=True)[0]
        # use cross entropy loss
        loss = binary_cross_entropy_weight(y_pred, output_y)
        if epoch % args.epochs_log==0 and batch_idx==0: # only output first batch's statistics
            print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, num_layer: {}, hidden: {}'.format(
                epoch, args.epochs,loss.item(), args.graph_type, args.num_layers, args.hidden_size_rnn))
        # logging
        log_value('loss_'+args.fname, loss.item(), epoch*args.batch_ratio+batch_idx)
        # print(y_pred.size())
        feature_dim = y_pred.size(0)*y_pred.size(1)
        loss_sum += loss.item()*feature_dim/y.size(0)
    return loss_sum/(batch_idx+1)
########### train function for LSTM + VAE
def train(args, dataset_train, rnn, output):
    """Main training loop for GraphRNN variants (VAE / MLP / RNN).

    Optionally resumes from a checkpoint, trains until ``args.epochs``,
    periodically samples and saves test graphs, checkpoints the model, and
    finally saves per-epoch wall-clock timings.

    NOTE(review): if ``args.note`` matches none of the known variants, the
    test phase would hit a NameError on ``G_pred_step`` — callers are
    presumably restricted to the three known notes; verify.
    """
    # check if load existing model
    if args.load:
        fname = args.model_save_path + args.fname + 'lstm_' + str(args.load_epoch) + '.dat'
        rnn.load_state_dict(torch.load(fname))
        fname = args.model_save_path + args.fname + 'output_' + str(args.load_epoch) + '.dat'
        output.load_state_dict(torch.load(fname))
        # NOTE(review): hard-coded fine-tuning LR silently overrides args.lr on resume
        args.lr = 0.00001
        epoch = args.load_epoch
        print('model loaded!, lr: {}'.format(args.lr))
    else:
        epoch = 1
    # initialize optimizer
    optimizer_rnn = optim.Adam(list(rnn.parameters()), lr=args.lr)
    optimizer_output = optim.Adam(list(output.parameters()), lr=args.lr)
    scheduler_rnn = MultiStepLR(optimizer_rnn, milestones=args.milestones, gamma=args.lr_rate)
    scheduler_output = MultiStepLR(optimizer_output, milestones=args.milestones, gamma=args.lr_rate)
    # start main loop
    time_all = np.zeros(args.epochs)
    while epoch<=args.epochs:
        time_start = tm.time()
        # train
        if 'GraphRNN_VAE' in args.note:
            train_vae_epoch(epoch, args, rnn, output, dataset_train,
                            optimizer_rnn, optimizer_output,
                            scheduler_rnn, scheduler_output)
        elif 'GraphRNN_MLP' in args.note:
            train_mlp_epoch(epoch, args, rnn, output, dataset_train,
                            optimizer_rnn, optimizer_output,
                            scheduler_rnn, scheduler_output)
        elif 'GraphRNN_RNN' in args.note:
            train_rnn_epoch(epoch, args, rnn, output, dataset_train,
                            optimizer_rnn, optimizer_output,
                            scheduler_rnn, scheduler_output)
        time_end = tm.time()
        time_all[epoch - 1] = time_end - time_start
        # test
        if epoch % args.epochs_test == 0 and epoch>=args.epochs_test_start:
            for sample_time in range(1,4):
                G_pred = []
                # keep sampling until enough graphs are collected
                while len(G_pred)<args.test_total_size:
                    if 'GraphRNN_VAE' in args.note:
                        G_pred_step = test_vae_epoch(epoch, args, rnn, output, test_batch_size=args.test_batch_size,sample_time=sample_time)
                    elif 'GraphRNN_MLP' in args.note:
                        G_pred_step = test_mlp_epoch(epoch, args, rnn, output, test_batch_size=args.test_batch_size,sample_time=sample_time)
                    elif 'GraphRNN_RNN' in args.note:
                        G_pred_step = test_rnn_epoch(epoch, args, rnn, output, test_batch_size=args.test_batch_size)
                    G_pred.extend(G_pred_step)
                # save graphs
                fname = args.graph_save_path + args.fname_pred + str(epoch) +'_'+str(sample_time) + '.dat'
                save_graph_list(G_pred, fname)
                if 'GraphRNN_RNN' in args.note:
                    break  # RNN sampling does not use sample_time; one pass suffices
            print('test done, graphs saved')
        # save model checkpoint
        if args.save:
            if epoch % args.epochs_save == 0:
                fname = args.model_save_path + args.fname + 'lstm_' + str(epoch) + '.dat'
                torch.save(rnn.state_dict(), fname)
                fname = args.model_save_path + args.fname + 'output_' + str(epoch) + '.dat'
                torch.save(output.state_dict(), fname)
        epoch += 1
    np.save(args.timing_save_path+args.fname,time_all)
########### for graph completion task
def train_graph_completion(args, dataset_test, rnn, output):
    """Load a trained model and run graph completion over ``dataset_test``.

    For each of three sampling rounds, completes the partially observed
    graphs with the MLP or VAE variant (VAE takes precedence if
    ``args.note`` mentions both) and pickles the results.

    Raises:
        ValueError: if ``args.note`` names no supported variant
            (previously this crashed with a NameError on ``G_pred``).
    """
    fname = args.model_save_path + args.fname + 'lstm_' + str(args.load_epoch) + '.dat'
    rnn.load_state_dict(torch.load(fname))
    fname = args.model_save_path + args.fname + 'output_' + str(args.load_epoch) + '.dat'
    output.load_state_dict(torch.load(fname))
    epoch = args.load_epoch
    print('model loaded!, epoch: {}'.format(args.load_epoch))
    for sample_time in range(1,4):
        G_pred = None
        if 'GraphRNN_MLP' in args.note:
            G_pred = test_mlp_partial_simple_epoch(epoch, args, rnn, output, dataset_test,sample_time=sample_time)
        if 'GraphRNN_VAE' in args.note:
            G_pred = test_vae_partial_epoch(epoch, args, rnn, output, dataset_test,sample_time=sample_time)
        if G_pred is None:
            raise ValueError('graph completion is not supported for note: {}'.format(args.note))
        # save graphs
        fname = args.graph_save_path + args.fname_pred + str(epoch) +'_'+str(sample_time) + 'graph_completion.dat'
        save_graph_list(G_pred, fname)
    print('graph completion done, graphs saved')
########### for NLL evaluation
def train_nll(args, dataset_train, dataset_test, rnn, output,graph_validate_len,graph_test_len, max_iter = 1000):
    """Estimate train/test NLL of a trained model and write results to CSV.

    Runs ``max_iter`` forward-only passes over both datasets, writing one
    ``train,test`` row per iteration; the header carries the validation and
    test graph-length statistics.

    Fixes vs. original: the loop variable no longer shadows the builtin
    ``iter``; an unsupported ``args.note`` now raises ValueError instead of
    hitting a NameError on ``nll_train``.
    """
    fname = args.model_save_path + args.fname + 'lstm_' + str(args.load_epoch) + '.dat'
    rnn.load_state_dict(torch.load(fname))
    fname = args.model_save_path + args.fname + 'output_' + str(args.load_epoch) + '.dat'
    output.load_state_dict(torch.load(fname))
    epoch = args.load_epoch
    print('model loaded!, epoch: {}'.format(args.load_epoch))
    fname_output = args.nll_save_path + args.note + '_' + args.graph_type + '.csv'
    with open(fname_output, 'w+') as f:
        f.write(str(graph_validate_len)+','+str(graph_test_len)+'\n')
        f.write('train,test\n')
        for _ in range(max_iter):
            nll_train = nll_test = None
            if 'GraphRNN_MLP' in args.note:
                nll_train = train_mlp_forward_epoch(epoch, args, rnn, output, dataset_train)
                nll_test = train_mlp_forward_epoch(epoch, args, rnn, output, dataset_test)
            if 'GraphRNN_RNN' in args.note:
                nll_train = train_rnn_forward_epoch(epoch, args, rnn, output, dataset_train)
                nll_test = train_rnn_forward_epoch(epoch, args, rnn, output, dataset_test)
            if nll_train is None:
                raise ValueError('NLL evaluation is not supported for note: {}'.format(args.note))
            print('train',nll_train,'test',nll_test)
            f.write(str(nll_train)+','+str(nll_test)+'\n')
    print('NLL evaluation done')
| 44.555848
| 159
| 0.640812
| 5,113
| 33,907
| 3.947389
| 0.055154
| 0.031214
| 0.031561
| 0.020215
| 0.868553
| 0.856662
| 0.84155
| 0.82079
| 0.813705
| 0.797602
| 0
| 0.009903
| 0.237621
| 33,907
| 760
| 160
| 44.614474
| 0.77087
| 0.159908
| 0
| 0.740113
| 0
| 0.001883
| 0.036835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026365
| false
| 0
| 0.050847
| 0
| 0.097928
| 0.030132
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ee7a159026d62257f531521d3fea88e0ca74aa0
| 5,585
|
py
|
Python
|
tests/test_run_pipeline_operator.py
|
sweatybridge/bedrock-airflow
|
b28d80a071362cb1f414f47b39d786b4ca05351f
|
[
"MIT"
] | null | null | null |
tests/test_run_pipeline_operator.py
|
sweatybridge/bedrock-airflow
|
b28d80a071362cb1f414f47b39d786b4ca05351f
|
[
"MIT"
] | 2
|
2021-08-30T16:29:09.000Z
|
2022-01-28T21:57:59.000Z
|
tests/test_run_pipeline_operator.py
|
sweatybridge/bedrock-airflow
|
b28d80a071362cb1f414f47b39d786b4ca05351f
|
[
"MIT"
] | null | null | null |
import datetime
import sys
import time
import pytest
from airflow import DAG
from airflow.hooks.http_hook import HttpHook
from bedrock_plugin import RunPipelineOperator
if sys.version_info >= (3, 3):
from unittest.mock import PropertyMock, patch
else:
from mock import PropertyMock, patch
def test_run_pipeline(airflow_connection):
    """Happy path: the operator triggers a run, polls status, and succeeds."""
    env_id = "test-environment"
    pipeline_id = "some-pipeline-id"
    run_id = "some-run-id"

    dag = DAG("bedrock_dag", start_date=datetime.datetime.now())
    operator = RunPipelineOperator(
        task_id="run_pipeline",
        dag=dag,
        conn_id=airflow_connection,
        pipeline_id=pipeline_id,
        run_source_commit="master",
        environment_id=env_id,
    )

    # Pre-compute the endpoints the operator is expected to hit.
    run_path = RunPipelineOperator.RUN_PIPELINE_PATH.format(pipeline_id)
    status_path = RunPipelineOperator.GET_PIPELINE_RUN_PATH.format(run_id)
    stop_path = RunPipelineOperator.STOP_PIPELINE_RUN_PATH.format(run_id)

    def fake_hook_run(endpoint, *_args, **_kwargs):
        # Stand-in for HttpHook.run: answer each known endpoint, fail on others.
        resp = PropertyMock()
        if endpoint == run_path:
            resp.content = '{{"entity_id": "{}"}}'.format(run_id)
        elif endpoint == status_path:
            resp.status_code = 200
            resp.content = '{{"status": "{}"}}'.format(
                RunPipelineOperator.SUCCESS_STATUS[0]
            )
        elif endpoint == stop_path:
            resp.content = "OK"
        else:
            pytest.fail("Called with bad args")
        return resp

    with patch.object(HttpHook, "run", side_effect=fake_hook_run) as mock_run:
        operator.execute(None)

    assert mock_run.call_count >= 2
    # First call triggers the pipeline, second call polls the run status.
    assert mock_run.mock_calls[0][1][0] == run_path
    assert mock_run.mock_calls[1][1][0] == status_path
def test_run_pipeline_waiting(airflow_connection):
    """Operator keeps polling while the run is in a wait status, then succeeds.

    The first status poll reports WAIT_STATUS, the second SUCCESS_STATUS;
    ``time.sleep`` is patched out so the retry loop runs instantly.
    """
    environment_id = "test-environment"
    pipeline_id = "some-pipeline-id"
    run_id = "some-run-id"
    dag = DAG("bedrock_dag", start_date=datetime.datetime.now())
    op = RunPipelineOperator(
        task_id="run_pipeline",
        dag=dag,
        conn_id=airflow_connection,
        pipeline_id=pipeline_id,
        run_source_commit="master",
        environment_id=environment_id,
    )
    # Mutable cell shared with the closure below (py2-compatible nonlocal).
    _outer = {"has_waited": False}
    def bedrockhook_run_side_effect(endpoint, *_args, **_kwargs):
        # Fake HttpHook.run: first status poll returns WAIT, second returns SUCCESS.
        resp = PropertyMock()
        if endpoint == RunPipelineOperator.RUN_PIPELINE_PATH.format(pipeline_id):
            resp.content = '{{"entity_id": "{}"}}'.format(run_id)
        elif endpoint == RunPipelineOperator.GET_PIPELINE_RUN_PATH.format(run_id):
            resp.status_code = 200
            if not _outer["has_waited"]:
                resp.content = '{{"status": "{}"}}'.format(
                    RunPipelineOperator.WAIT_STATUS[0]
                )
                _outer["has_waited"] = True
            else:
                resp.content = '{{"status": "{}"}}'.format(
                    RunPipelineOperator.SUCCESS_STATUS[0]
                )
        elif endpoint == RunPipelineOperator.STOP_PIPELINE_RUN_PATH.format(run_id):
            resp.content = "OK"
        else:
            pytest.fail("Called with bad args")
        return resp
    with patch.object(
        HttpHook, "run", side_effect=bedrockhook_run_side_effect
    ) as mock_resp, patch.object(time, "sleep"):
        op.execute(None)
    assert mock_resp.call_count >= 2
    # First call triggers the pipeline, second call polls the run status.
    run_pipeline_call = mock_resp.mock_calls[0]
    assert run_pipeline_call[1][0] == RunPipelineOperator.RUN_PIPELINE_PATH.format(
        pipeline_id
    )
    check_status_call = mock_resp.mock_calls[1]
    assert check_status_call[1][0] == RunPipelineOperator.GET_PIPELINE_RUN_PATH.format(
        run_id
    )
def test_run_pipeline_failure(airflow_connection):
    """Operator raises when the pipeline run ends in a failure status."""
    environment_id = "test-environment"
    pipeline_id = "some-pipeline-id"
    run_id = "some-run-id"
    fail_status = "A huge failure"
    dag = DAG("bedrock_dag", start_date=datetime.datetime.now())
    op = RunPipelineOperator(
        task_id="run_pipeline",
        dag=dag,
        conn_id=airflow_connection,
        pipeline_id=pipeline_id,
        run_source_commit="master",
        environment_id=environment_id,
    )
    def bedrockhook_run_side_effect(endpoint, *_args, **_kwargs):
        # Fake HttpHook.run: trigger succeeds, status poll reports failure,
        # stop endpoint returns OK (presumably hit when the operator aborts).
        resp = PropertyMock()
        if endpoint == RunPipelineOperator.RUN_PIPELINE_PATH.format(pipeline_id):
            resp.content = '{{"entity_id": "{}"}}'.format(run_id)
        elif endpoint == RunPipelineOperator.GET_PIPELINE_RUN_PATH.format(run_id):
            resp.status_code = 200
            resp.content = '{{"status": "{}"}}'.format(fail_status)
        elif endpoint == RunPipelineOperator.STOP_PIPELINE_RUN_PATH.format(run_id):
            resp.content = "OK"
        else:
            pytest.fail("Called with bad args: {}".format(endpoint))
        return resp
    with patch.object(
        HttpHook, "run", side_effect=bedrockhook_run_side_effect
    ) as mock_resp, pytest.raises(Exception) as ex:
        op.execute(None)
    # The raised exception carries the reported failure status verbatim.
    assert ex.value.args[0] == "Run status is {}".format(fail_status)
    assert mock_resp.call_count >= 2
    run_pipeline_call = mock_resp.mock_calls[0]
    assert run_pipeline_call[1][0] == RunPipelineOperator.RUN_PIPELINE_PATH.format(
        pipeline_id
    )
    check_status_call = mock_resp.mock_calls[1]
    assert check_status_call[1][0] == RunPipelineOperator.GET_PIPELINE_RUN_PATH.format(
        run_id
    )
| 33.443114
| 87
| 0.654969
| 658
| 5,585
| 5.246201
| 0.142857
| 0.057358
| 0.038239
| 0.054751
| 0.858343
| 0.846176
| 0.846176
| 0.846176
| 0.846176
| 0.846176
| 0
| 0.008455
| 0.237601
| 5,585
| 166
| 88
| 33.644578
| 0.802255
| 0
| 0
| 0.73913
| 0
| 0
| 0.08863
| 0
| 0
| 0
| 0
| 0
| 0.072464
| 1
| 0.043478
| false
| 0
| 0.065217
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7332b8481483a729290fef46b0585c4f9ac1344d
| 23,438
|
py
|
Python
|
microproxy/test/layer/test_http1.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 20
|
2016-04-17T08:43:26.000Z
|
2021-05-31T04:01:27.000Z
|
microproxy/test/layer/test_http1.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 237
|
2016-04-17T07:07:08.000Z
|
2017-01-26T09:15:52.000Z
|
microproxy/test/layer/test_http1.py
|
mike820324/microProxy
|
64c7c5add4759c6e105b9438cd18c0f8c930c7a3
|
[
"MIT"
] | 5
|
2016-04-16T14:22:45.000Z
|
2019-11-27T04:41:55.000Z
|
import h11
import mock
from tornado.gen import coroutine, sleep
from tornado.netutil import add_accept_handler
from tornado.testing import gen_test, bind_unused_port
from unittest import TestCase
from microproxy.context import (
HttpRequest, HttpResponse, HttpHeaders,
LayerContext, ServerContext
)
from microproxy.exception import SrcStreamClosedError, DestStreamClosedError
from microproxy.layer import Http1Layer
from microproxy.layer.application.http1 import (
parse_proxy_path, parse_tunnel_proxy_path)
from microproxy.protocol.http1 import Connection
from microproxy.tornado_ext.iostream import MicroProxyIOStream
from microproxy.test.utils import ProxyAsyncTestCase
class TestHtt1(TestCase):
    """Unit tests for the proxy-path parsing helpers of the HTTP/1 layer."""

    def test_parse_proxy_path_http_80(self):
        expected = ("http", "example.com", 80, "/")
        self.assertEqual(parse_proxy_path("http://example.com/"), expected)

    def test_parse_proxy_path_http_8080(self):
        expected = ("http", "example.com", 8080, "/")
        self.assertEqual(parse_proxy_path("http://example.com:8080/"), expected)

    def test_parse_proxy_path_https_443(self):
        expected = ("https", "example.com", 443, "/")
        self.assertEqual(parse_proxy_path("https://example.com/"), expected)

    def test_parse_proxy_path_https_8443(self):
        expected = ("https", "example.com", 8443, "/")
        self.assertEqual(parse_proxy_path("https://example.com:8443/"), expected)

    def test_parse_proxy_path_http_80_index(self):
        expected = ("http", "example.com", 80, "/index")
        self.assertEqual(parse_proxy_path("http://example.com/index"), expected)

    def test_parse_proxy_path_without_scheme(self):
        # A URL lacking a scheme is rejected.
        with self.assertRaises(ValueError):
            parse_proxy_path("example.com/")

    def test_parse_proxy_path_without_path(self):
        # A URL lacking a trailing path component is rejected.
        with self.assertRaises(ValueError):
            parse_proxy_path("http://example.com")

    def test_parse_tunnel_proxy_path_http_80(self):
        expected = ("http", "example.com", 80)
        self.assertEqual(parse_tunnel_proxy_path("example.com:80"), expected)

    def test_parse_tunnel_proxy_path_https_443(self):
        expected = ("https", "example.com", 443)
        self.assertEqual(parse_tunnel_proxy_path("example.com:443"), expected)

    def test_parse_tunnel_proxy_path_without_port(self):
        # Tunnel targets must carry an explicit port.
        with self.assertRaises(ValueError):
            parse_tunnel_proxy_path("example.com")
class TestHttp1Layer(ProxyAsyncTestCase):
    """Integration tests for Http1Layer over two in-memory stream pairs.

    asyncSetUp wires client_stream <-> src_stream and dest_stream <->
    server_stream; the layer under test sits between src_stream and
    dest_stream and forwards HTTP/1.1 traffic in both directions.
    Events decoded by the h11-based client/server connections are
    recorded into src_events / dest_events for assertion.
    """

    def setUp(self):
        super(TestHttp1Layer, self).setUp()
        self.asyncSetUp()
        # Responses seen by the client side / requests seen by the server side.
        self.src_events = []
        self.dest_events = []

    @gen_test
    def asyncSetUp(self):
        # Two connected stream pairs: the layer bridges the inner ends.
        self.client_stream, src_stream = yield self.create_iostream_pair()
        dest_stream, self.server_stream = yield self.create_iostream_pair()
        # Interceptor is mocked out so only the layer's forwarding is tested.
        server_state = ServerContext(
            config={},
            interceptor=mock.Mock(**{
                "publish.return_value": None,
                "request.return_value": None,
                "response.return_value": None,
            })
        )
        self.http_layer = Http1Layer(
            server_state,
            LayerContext(mode="socks",
                         src_stream=src_stream, dest_stream=dest_stream))
        # h11 client endpoint: records (info-)responses coming back to the source.
        self.client_conn = Connection(
            h11.CLIENT, self.client_stream,
            on_response=self.record_src_event,
            on_info_response=self.record_src_event,
            on_unhandled=self.ignore_event)
        # h11 server endpoint: records requests arriving at the destination.
        self.server_conn = Connection(
            h11.SERVER, self.server_stream,
            on_request=self.record_dest_event,
            on_unhandled=self.ignore_event)

    def record_src_event(self, *args):
        # Callback: store every event delivered to the client side.
        self.src_events.append(args)

    def record_dest_event(self, *args):
        # Callback: store every event delivered to the server side.
        self.dest_events.append(args)

    def ignore_event(self, *args):
        # Callback for event types the test does not care about.
        pass

    @coroutine
    def read_until_new_event(self, conn, events):
        # Keep pumping bytes through conn until a callback records an event.
        while len(events) == 0:
            yield conn.read_bytes()

    @gen_test
    def test_req_and_resp(self):
        """A request and its response are forwarded intact in both directions."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events[0]
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/index")
        # Header names are normalized to lowercase by the layer.
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost")]))
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events[0]
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "200")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "OK")
        # The relayed response is re-framed with chunked transfer-encoding.
        self.assertEqual(
            response.headers,
            HttpHeaders([("content-type", "plain/text"), ("transfer-encoding", "chunked")]))
        self.assertEqual(response.body, "body")
        # The layer keeps running for possible further request cycles.
        self.assertTrue(http_layer_future.running())
        self.client_stream.close()
        self.server_stream.close()
        self.http_layer.src_stream.close()
        self.http_layer.dest_stream.close()
        yield http_layer_future

    @gen_test
    def test_write_req_to_dest_failed(self):
        """Writing the request to a closed destination raises DestStreamClosedError."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.server_stream.close()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        with self.assertRaises(DestStreamClosedError):
            yield http_layer_future

    @gen_test
    def test_read_resp_from_dest_failed(self):
        """A destination that closes before responding raises DestStreamClosedError."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        self.assertTrue(http_layer_future.running())
        self.server_stream.close()
        with self.assertRaises(DestStreamClosedError):
            yield http_layer_future

    @gen_test
    def test_write_resp_to_src_failed(self):
        """Writing the response to a closed source raises SrcStreamClosedError."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/index")
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost")]))
        # Close the client end before the response is relayed back.
        self.client_stream.close()
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")]))
        with self.assertRaises(SrcStreamClosedError):
            yield http_layer_future

    @gen_test
    def test_replay(self):
        """In replay mode the layer finishes and closes both streams after one cycle."""
        self.http_layer.context.mode = "replay"
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/index")
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost")]))
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "200")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "OK")
        self.assertEqual(
            response.headers,
            HttpHeaders([("content-type", "plain/text"), ("transfer-encoding", "chunked")]))
        self.assertEqual(response.body, "body")
        # Unlike normal mode, replay completes after a single exchange.
        self.assertTrue(http_layer_future.done())
        yield http_layer_future
        self.assertTrue(self.http_layer.src_stream.closed())
        self.assertTrue(self.http_layer.dest_stream.closed())

    @gen_test
    def test_on_websocket(self):
        """A 101 upgrade handshake completes the layer but leaves streams open."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/chat",
            headers=[("Host", "localhost"), ("Upgrade", "websocket")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/chat")
        self.assertEqual(request.headers,
                         HttpHeaders([("host", "localhost"),
                                      ("upgrade", "websocket")]))
        self.server_conn.send_info_response(HttpResponse(
            version="HTTP/1.1", code="101", reason="Switching Protocol",
            headers=[("Upgrade", "websocket"), ("Connection", "Upgrade")]))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "101")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "Switching Protocol")
        self.assertEqual(
            response.headers,
            HttpHeaders([("upgrade", "websocket"), ("connection", "Upgrade")]))
        self.assertTrue(http_layer_future.done())
        yield http_layer_future
        # Streams stay open so the next layer can speak the upgraded protocol.
        self.assertFalse(self.http_layer.src_stream.closed())
        self.assertFalse(self.http_layer.dest_stream.closed())

    @gen_test
    def test_read_response_without_chunked_and_content_length(self):
        """A connection-close framed response body is still relayed correctly."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/index",
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/index")
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost")]))
        # Raw response without Content-Length or chunked framing; end of
        # body is signalled by closing the server stream.
        yield self.server_stream.write(
            (b"HTTP/1.1 200 OK\r\n"
             b"Connection: closed\r\n\r\n"
             b"body"))
        self.server_stream.close()
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "200")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "OK")
        self.assertEqual(
            response.headers,
            HttpHeaders([("connection", "closed"), ('transfer-encoding', 'chunked')]))
        self.assertEqual(response.body, "body")
        self.assertTrue(http_layer_future.done())
        yield http_layer_future

    @gen_test
    def test_write_info_resp_to_src_failed(self):
        """Relaying an informational response to a closed source raises SrcStreamClosedError."""
        http_layer_future = self.http_layer.process_and_return_context()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path="/chat",
            headers=[("Host", "localhost"), ("Upgrade", "websocket")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        self.assertEqual(request.path, "/chat")
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost"), ("upgrade", "websocket")]))
        self.client_stream.close()
        self.server_conn.send_info_response(HttpResponse(
            version="HTTP/1.1", code="101", reason="Switching Protocol",
            headers=[("Content-Type", "plain/text")]))
        with self.assertRaises(SrcStreamClosedError):
            yield http_layer_future

    def tearDown(self):
        # Close every stream unconditionally so no fd leaks between tests.
        self.client_stream.close()
        self.server_stream.close()
        self.http_layer.src_stream.close()
        self.http_layer.dest_stream.close()
class TestHttp1LayerProxying(ProxyAsyncTestCase):
    """Integration tests for Http1Layer in forward-proxy ("http") mode.

    Unlike TestHttp1Layer, the destination is a real listening socket:
    the layer itself must open the outgoing connection based on the
    absolute-form URL (or CONNECT target) in the client's request.
    """

    def setUp(self):
        super(TestHttp1LayerProxying, self).setUp()
        self.asyncSetUp()
        # Responses seen by the client side / requests seen by the server side.
        self.src_events = []
        self.dest_events = []

    @gen_test
    def asyncSetUp(self):
        self.client_stream, src_stream = yield self.create_iostream_pair()
        # Real listener: the layer connects to it when proxying.
        self.listener, self.port = bind_unused_port()
        add_accept_handler(self.listener, self.on_server_connnect)
        server_state = ServerContext(
            config={},
            interceptor=mock.Mock(**{
                "publish.return_value": None,
                "request.return_value": None,
                "response.return_value": None,
            })
        )
        # No dest_stream here: "http" mode resolves the destination itself.
        self.http_layer = Http1Layer(
            server_state,
            LayerContext(
                mode="http",
                src_stream=src_stream))
        self.client_conn = Connection(
            h11.CLIENT, self.client_stream,
            on_response=self.record_src_event,
            on_info_response=self.record_src_event,
            on_unhandled=self.ignore_event)
        # Populated lazily in on_server_connnect once the layer dials in.
        self.server_stream = None
        self.server_conn = None

    def on_server_connnect(self, conn, addr):
        # Accept callback: wrap the accepted socket as the server endpoint.
        self.server_stream = MicroProxyIOStream(conn)
        self.server_conn = Connection(
            h11.SERVER, self.server_stream,
            on_request=self.record_dest_event,
            on_unhandled=self.ignore_event)

    def record_src_event(self, *args):
        # Callback: store every event delivered to the client side.
        self.src_events.append(args)

    def record_dest_event(self, *args):
        # Callback: store every event delivered to the server side.
        self.dest_events.append(args)

    def ignore_event(self, *args):
        # Callback for event types the test does not care about.
        pass

    @coroutine
    def read_until_new_event(self, conn, events):
        # Keep pumping bytes through conn until a callback records an event.
        while len(events) == 0:
            yield conn.read_bytes()

    @coroutine
    def wait_for_server_connect(self):
        # Poll until the accept handler has produced a server stream.
        while not self.server_stream:
            yield sleep(0.1)

    @gen_test
    def test_proxy(self):
        """An absolute-form request is dialed out and its path rewritten to origin-form."""
        http_layer_future = self.http_layer.process_and_return_context()
        path = "http://127.0.0.1:{0}/".format(self.port)
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path=path,
            headers=[("Host", "localhost")]))
        yield self.wait_for_server_connect()
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events[0]
        self.assertIsInstance(request, HttpRequest)
        self.assertEqual(request.method, "GET")
        self.assertEqual(request.version, "HTTP/1.1")
        # Absolute URL is reduced to its path before forwarding.
        self.assertEqual(request.path, "/")
        self.assertEqual(request.headers, HttpHeaders([("host", "localhost")]))
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events[0]
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "200")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "OK")
        self.assertEqual(
            response.headers,
            HttpHeaders([("content-type", "plain/text"), ("transfer-encoding", "chunked")]))
        self.assertEqual(response.body, "body")
        self.assertTrue(http_layer_future.running())
        self.client_stream.close()
        self.server_stream.close()
        self.http_layer.src_stream.close()
        self.http_layer.dest_stream.close()
        yield http_layer_future

    @gen_test
    def test_proxy_reuse_connection(self):
        """Two request cycles to the same host reuse one upstream connection."""
        http_layer_future = self.http_layer.process_and_return_context()
        path = "http://127.0.0.1:{0}/".format(self.port)
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path=path,
            headers=[("Host", "localhost")]))
        yield self.wait_for_server_connect()
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue(http_layer_future.running())
        # Reset both h11 state machines for a second keep-alive cycle.
        self.client_conn.start_next_cycle()
        self.server_conn.start_next_cycle()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path=path,
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.client_stream.close()
        self.server_stream.close()
        self.http_layer.src_stream.close()
        self.http_layer.dest_stream.close()
        yield http_layer_future

    @gen_test
    def test_proxy_new_connection(self):
        """A request to a different host closes the stale upstream connection."""
        # Pretend an open connection to example.com:8080 already exists.
        prev_dest_stream = mock.Mock(**{
            "closed.return_value": False
        })
        self.http_layer.dest_stream = prev_dest_stream
        self.http_layer.context.host = "example.com"
        self.http_layer.context.port = 8080
        http_layer_future = self.http_layer.process_and_return_context()
        path = "http://127.0.0.1:{0}/".format(self.port)
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path=path,
            headers=[("Host", "localhost")]))
        yield self.wait_for_server_connect()
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue(http_layer_future.running())
        self.client_conn.start_next_cycle()
        self.server_conn.start_next_cycle()
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="GET", path=path,
            headers=[("Host", "localhost")]))
        yield self.read_until_new_event(self.server_conn, self.dest_events)
        self.assertEqual(len(self.dest_events), 1)
        request, = self.dest_events.pop()
        self.assertIsInstance(request, HttpRequest)
        self.server_conn.send_response(HttpResponse(
            version="HTTP/1.1", code="200", reason="OK",
            headers=[("Content-Type", "plain/text")], body="body"))
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        # The stale connection to the previous host must have been closed.
        prev_dest_stream.close.assert_called_with()
        self.client_stream.close()
        self.server_stream.close()
        self.http_layer.src_stream.close()
        self.http_layer.dest_stream.close()
        yield http_layer_future

    @gen_test
    def test_tunnel_on_http(self):
        """A CONNECT request gets a 200 reply and ends the HTTP/1 layer."""
        http_layer_future = self.http_layer.process_and_return_context()
        path = "127.0.0.1:{0}".format(self.port)
        self.client_conn.send_request(HttpRequest(
            version="HTTP/1.1", method="CONNECT", path=path,
            headers=[
                ("Host", "127.0.0.1:{0}".format(self.port)),
                ("Proxy-Connection", "Keep-Alive"),
            ]))
        yield http_layer_future
        yield self.read_until_new_event(self.client_conn, self.src_events)
        self.assertEqual(len(self.src_events), 1)
        response, = self.src_events.pop()
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.code, "200")
        self.assertEqual(response.version, "HTTP/1.1")
        self.assertEqual(response.reason, "OK")
        yield http_layer_future

    def tearDown(self):
        # server_stream / dest_stream may never have been created if the
        # layer did not get as far as dialing out.
        self.client_stream.close()
        if self.server_stream:
            self.server_stream.close()
        self.http_layer.src_stream.close()
        if self.http_layer.dest_stream:
            self.http_layer.dest_stream.close()
        self.listener.close()
| 38.297386
| 105
| 0.645832
| 2,743
| 23,438
| 5.280715
| 0.062705
| 0.085951
| 0.040387
| 0.033207
| 0.880428
| 0.861374
| 0.842527
| 0.824646
| 0.808837
| 0.783707
| 0
| 0.014813
| 0.228091
| 23,438
| 611
| 106
| 38.360065
| 0.785817
| 0
| 0
| 0.773737
| 0
| 0
| 0.086057
| 0.001792
| 0
| 0
| 0
| 0
| 0.250505
| 1
| 0.076768
| false
| 0.00404
| 0.026263
| 0
| 0.109091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73426368f89543b7e0d522749a370f0222436c52
| 6,319
|
py
|
Python
|
bot/setting.py
|
dvdrm/gd
|
c004724344577bb608fa0611d10c16b211995f72
|
[
"Apache-2.0"
] | 14
|
2022-03-27T03:10:37.000Z
|
2022-03-31T18:35:18.000Z
|
bot/setting.py
|
dvdrm/gd
|
c004724344577bb608fa0611d10c16b211995f72
|
[
"Apache-2.0"
] | 11
|
2022-03-27T03:37:27.000Z
|
2022-03-31T17:37:47.000Z
|
bot/setting.py
|
dvdrm/gd
|
c004724344577bb608fa0611d10c16b211995f72
|
[
"Apache-2.0"
] | 21
|
2022-03-26T15:32:33.000Z
|
2022-03-31T21:32:22.000Z
|
import json
from telethon import events, Button
from asyncio import exceptions
from .. import jdbot, chat_id, BOT_SET_JSON_FILE_USER, BOT_SET, ch_name
from .utils import split_list, logger, press_event
@jdbot.on(events.NewMessage(from_users=chat_id, pattern='^/set$'))
async def bot_set(event):
    """Interactive /set flow: show current bot settings as inline buttons,
    let the user pick one, prompt for a new value, confirm, and persist the
    updated settings back to BOT_SET_JSON_FILE_USER (takes effect after a
    restart)."""
    SENDER = event.sender_id
    try:
        msg = await jdbot.send_message(chat_id, '请稍后,正在查询')
        # Load the user's current settings from disk.
        with open(BOT_SET_JSON_FILE_USER, 'r', encoding='utf-8') as f:
            myset = json.load(f)
        info = '您目前设置如下:\n'
        for i in myset:
            # Command aliases are edited by /setname, not here.
            if '命令别名' in i:
                continue
            else:
                info = info + f'\t\t- {i}-->{myset[i]} \n'
        info = info + '请点击您要设置的项目,选择后,输入要设置的值,重启生效,垃圾话以 | 进行区隔,黑名单以空格或逗号或顿号区隔'
        # One inline button per scalar setting; nested dicts are skipped.
        btn = [Button.inline(i, i) for i in myset if not isinstance(myset[i],dict)]
        btn.append(Button.inline('取消', data='cancel'))
        btn = split_list(btn, 3)
        # 90-second conversation window; exceeding it raises TimeoutError.
        async with jdbot.conversation(SENDER, timeout=90) as conv:
            msg = await jdbot.edit_message(msg, info, buttons=btn, link_preview=False)
            convdata = await conv.wait_event(press_event(SENDER))
            res = bytes.decode(convdata.data)
            if res == 'cancel':
                msg = await jdbot.edit_message(msg, '对话已取消')
                conv.cancel()
            else:
                await jdbot.delete_messages(chat_id, msg)
                msg = await conv.send_message(f'请输入您要修改的{res}\n如果需要取消,请输入`cancel`或`取消`\n如需自定义或快速修改,请直接修改config/botset.json\n如果为True或False首字符大写\n```{myset[res]}```')
                data = await conv.get_response()
                if data.raw_text == 'cancel' or data.raw_text == '取消':
                    # User typed a cancel keyword instead of a value.
                    await jdbot.delete_messages(chat_id,msg)
                    await jdbot.send_message(chat_id, '对话已取消')
                    conv.cancel()
                else:
                    # Ask for confirmation before writing anything to disk.
                    markup = [Button.inline('确认',data='yes'),Button.inline('取消',data='cancel')]
                    await jdbot.delete_messages(chat_id,msg)
                    msg = await jdbot.send_message(chat_id, f'是否确认将 ** {res} ** 设置为 **{data.raw_text}**', buttons=markup)
                    convdata2 = await conv.wait_event(press_event(SENDER))
                    res2 = bytes.decode(convdata2.data)
                    if res2 == 'yes':
                        # Persist the confirmed value.
                        myset[res] = data.raw_text
                        with open(BOT_SET_JSON_FILE_USER, 'w+', encoding='utf-8') as f:
                            json.dump(myset, f)
                        await jdbot.delete_messages(chat_id, msg)
                        msg = await jdbot.send_message(chat_id, '已完成修改,重启后生效')
                    else:
                        conv.cancel()
                        await jdbot.delete_messages(chat_id, msg)
                        msg = await jdbot.send_message(chat_id, '对话已取消')
                return
    except exceptions.TimeoutError:
        msg = await jdbot.edit_message(msg, '选择已超时,对话已停止')
    except Exception as e:
        # NOTE(review): broad catch keeps the bot alive on any failure and
        # reports the error back to the chat.
        msg = await jdbot.edit_message(msg, f'something wrong,I\'m sorry\n{str(e)}')
        logger.error(f'something wrong,I\'m sorry\n{str(e)}')
@jdbot.on(events.NewMessage(from_users=chat_id, pattern='^/setname$'))
async def bot_setname(event):
    """Interactive /setname flow: same confirm-and-persist dialog as
    bot_set, but operating on the nested command-alias mapping
    (myset['命令别名']) instead of the top-level settings."""
    SENDER = event.sender_id
    try:
        msg = await jdbot.send_message(chat_id, '请稍后,正在查询')
        # Load the user's current settings from disk.
        with open(BOT_SET_JSON_FILE_USER, 'r', encoding='utf-8') as f:
            myset = json.load(f)
        info = '您目前命令别名设置如下:\n'
        for i in myset['命令别名']:
            info = info + f'\t\t- {i}-->{myset["命令别名"][i]} \n'
        info = info + '请点击您要设置的项目,选择后,输入要设置的值,重启生效\n**请注意尽量不要重复,否则可能发生未知错误**'
        # One inline button per alias entry.
        btn = [Button.inline(i, i) for i in myset['命令别名']]
        btn.append(Button.inline('取消', data='cancel'))
        btn = split_list(btn, 3)
        # 90-second conversation window; exceeding it raises TimeoutError.
        async with jdbot.conversation(SENDER, timeout=90) as conv:
            msg = await jdbot.edit_message(msg, info, buttons=btn, link_preview=False)
            convdata = await conv.wait_event(press_event(SENDER))
            res = bytes.decode(convdata.data)
            if res == 'cancel':
                msg = await jdbot.edit_message(msg, '对话已取消')
                conv.cancel()
            else:
                await jdbot.delete_messages(chat_id, msg)
                msg = await conv.send_message(f'请输入您要修改的{res}\n如果需要取消,请输入`cancel`或`取消`\n如需自定义或快速修改,请直接修改config/botset.json\n如果为True或False首字符大写\n```{myset["命令别名"][res]}```')
                data = await conv.get_response()
                if data.raw_text == 'cancel' or data.raw_text == '取消':
                    # User typed a cancel keyword instead of a value.
                    await jdbot.delete_messages(chat_id,msg)
                    msg = await jdbot.send_message(chat_id, '对话已取消')
                    conv.cancel()
                    return
                else:
                    # Ask for confirmation before writing anything to disk.
                    markup = [Button.inline('确认',data='yes'),Button.inline('取消',data='cancel')]
                    await jdbot.delete_messages(chat_id,msg)
                    msg = await jdbot.send_message(chat_id, f'是否确认将 ** {res} ** 设置为 **{data.raw_text}**', buttons=markup)
                    convdata2 = await conv.wait_event(press_event(SENDER))
                    res2 = bytes.decode(convdata2.data)
                    if res2 == 'yes':
                        # Persist the confirmed alias.
                        myset['命令别名'][res] = data.raw_text
                        with open(BOT_SET_JSON_FILE_USER, 'w+', encoding='utf-8') as f:
                            json.dump(myset, f)
                        await jdbot.delete_messages(chat_id, msg)
                        msg = await jdbot.send_message(chat_id, '已完成修改,重启后生效')
                    else:
                        conv.cancel()
                        await jdbot.delete_messages(chat_id, msg)
                        msg = await jdbot.send_message(chat_id, '对话已取消')
                return
    except exceptions.TimeoutError:
        msg = await jdbot.edit_message(msg, '选择已超时,对话已停止')
    except Exception as e:
        # NOTE(review): broad catch keeps the bot alive on any failure and
        # reports the error back to the chat.
        msg = await jdbot.edit_message(msg, f'something wrong,I\'m sorry\n{str(e)}')
        logger.error(f'something wrong,I\'m sorry\n{str(e)}')
# When ch_name is truthy (presumably the "use Chinese command aliases"
# switch — confirm against the package config), also register both
# handlers under the user-configured alias patterns from BOT_SET.
if ch_name:
    jdbot.add_event_handler(bot_set, events.NewMessage(
        from_users=chat_id, pattern=BOT_SET['命令别名']['set']))
    jdbot.add_event_handler(bot_setname, events.NewMessage(
        from_users=chat_id, pattern=BOT_SET['命令别名']['setname']))
| 51.795082
| 172
| 0.561639
| 783
| 6,319
| 4.381865
| 0.172414
| 0.081609
| 0.068202
| 0.049548
| 0.903818
| 0.874672
| 0.874672
| 0.864762
| 0.844943
| 0.80239
| 0
| 0.004148
| 0.313341
| 6,319
| 121
| 173
| 52.223141
| 0.786587
| 0
| 0
| 0.732759
| 0
| 0.017241
| 0.129926
| 0.054597
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043103
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7350a0068a72e86a2f8d8d77ecb5da15d173bb72
| 211
|
py
|
Python
|
GeoDjango/geodjango/core/views.py
|
lmrissi/piloto_geodjango
|
729576c7e66b5d9b33618d8039bca21228927d21
|
[
"Apache-2.0"
] | null | null | null |
GeoDjango/geodjango/core/views.py
|
lmrissi/piloto_geodjango
|
729576c7e66b5d9b33618d8039bca21228927d21
|
[
"Apache-2.0"
] | null | null | null |
GeoDjango/geodjango/core/views.py
|
lmrissi/piloto_geodjango
|
729576c7e66b5d9b33618d8039bca21228927d21
|
[
"Apache-2.0"
] | null | null | null |
from django.db.models import Max
from django.shortcuts import render
def index(request):
    """Render the site landing page."""
    template_name = 'core/index.html'
    return render(request, template_name)
def mapa(request):
    """Render the interactive map page."""
    template_name = 'core/mapa.html'
    return render(request, template_name)
| 21.1
| 46
| 0.7109
| 29
| 211
| 5.172414
| 0.517241
| 0.133333
| 0.253333
| 0.346667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180095
| 211
| 9
| 47
| 23.444444
| 0.867052
| 0
| 0
| 0
| 0
| 0
| 0.143564
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
b43322f94621a95cc787a7a4481511b881a92353
| 3,361
|
py
|
Python
|
halotools/mock_observables/pairwise_velocities/tests/test_velocity_marked_npairs_3d.py
|
mclaughlin6464/halotools_old
|
96fbdf5fc156160f19ccd4ae3ee964f831d26fa6
|
[
"BSD-3-Clause"
] | null | null | null |
halotools/mock_observables/pairwise_velocities/tests/test_velocity_marked_npairs_3d.py
|
mclaughlin6464/halotools_old
|
96fbdf5fc156160f19ccd4ae3ee964f831d26fa6
|
[
"BSD-3-Clause"
] | null | null | null |
halotools/mock_observables/pairwise_velocities/tests/test_velocity_marked_npairs_3d.py
|
mclaughlin6464/halotools_old
|
96fbdf5fc156160f19ccd4ae3ee964f831d26fa6
|
[
"BSD-3-Clause"
] | null | null | null |
""" Module providing testing of `halotools.mock_observables.velocity_marked_npairs_3d`
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from astropy.tests.helper import pytest
from astropy.utils.misc import NumpyRNGContext
from ..velocity_marked_npairs_3d import velocity_marked_npairs_3d
from ..velocity_marked_npairs_3d import _velocity_marked_npairs_3d_process_weights as process_weights_3d
# Public names for `from ... import *`. The original listed only test1
# even though six tests exist in the module; list them all so star-import
# based collection sees the whole suite.
__all__ = ('test_velocity_marked_npairs_3d_test1',
           'test_velocity_marked_npairs_3d_test2',
           'test_velocity_marked_npairs_3d_test3',
           'test_velocity_marked_npairs_3d_test4',
           'test_velocity_marked_npairs_3d_test5',
           'test_velocity_marked_npairs_3d_test6')

# Seed handed to NumpyRNGContext so the random samples are reproducible.
fixed_seed = 43
def test_velocity_marked_npairs_3d_test1():
    """Weights preprocessing accepts a (npts, 6) weights array for func id 11."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points = np.random.random((num_pts, 3))
        marks = np.random.random((num_pts, 6))
    func_id = 11
    __ = process_weights_3d(points, points, marks, marks, func_id)
def test_velocity_marked_npairs_3d_test2():
    """Weights preprocessing accepts distinct samples with (npts, 6) weights."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points_a = np.random.random((num_pts, 3))
        points_b = np.random.random((num_pts, 3))
        marks_a = np.random.random((num_pts, 6))
        marks_b = np.random.random((num_pts, 6))
    func_id = 11
    __ = process_weights_3d(points_a, points_b, marks_a, marks_b, func_id)
def test_velocity_marked_npairs_3d_test3():
    """A (npts, 7) weights array is rejected for func id 11."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points_a = np.random.random((num_pts, 3))
        points_b = np.random.random((num_pts, 3))
        marks_a = np.random.random((num_pts, 7))
        marks_b = np.random.random((num_pts, 7))
    func_id = 11
    with pytest.raises(ValueError) as err:
        __ = process_weights_3d(points_a, points_b, marks_a, marks_b, func_id)
    substr = "For this value of `weight_func_id`, there should be"
    assert substr in err.value.args[0]
def test_velocity_marked_npairs_3d_test4():
    """A 1-d weights array of the wrong length is rejected."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points_a = np.random.random((num_pts, 3))
        points_b = np.random.random((num_pts, 3))
        marks_a = np.random.random(num_pts)
        marks_b = np.random.random(num_pts)
    func_id = 11
    with pytest.raises(ValueError) as err:
        __ = process_weights_3d(points_a, points_b, marks_a, marks_b, func_id)
    substr = "does not have the correct length. "
    assert substr in err.value.args[0]
def test_velocity_marked_npairs_3d_test5():
    """A (npts, 3) weights array is rejected for func id 11."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points_a = np.random.random((num_pts, 3))
        points_b = np.random.random((num_pts, 3))
        marks_a = np.random.random((num_pts, 3))
        marks_b = np.random.random((num_pts, 3))
    func_id = 11
    with pytest.raises(ValueError) as err:
        __ = process_weights_3d(points_a, points_b, marks_a, marks_b, func_id)
    substr = "For this value of `weight_func_id`, there should be "
    assert substr in err.value.args[0]
def test_velocity_marked_npairs_3d_test6():
    """A 3-d weights array is rejected: only 1-d or 2-d shapes are allowed."""
    num_pts = 10
    with NumpyRNGContext(fixed_seed):
        points_a = np.random.random((num_pts, 3))
        points_b = np.random.random((num_pts, 3))
        marks_a = np.random.random((num_pts, 3, 4))
        marks_b = np.random.random((num_pts, 3, 4))
    func_id = 11
    with pytest.raises(ValueError) as err:
        __ = process_weights_3d(points_a, points_b, marks_a, marks_b, func_id)
    substr = "You must either pass in a 1-D or 2-D array"
    assert substr in err.value.args[0]
| 34.295918
| 104
| 0.699792
| 461
| 3,361
| 4.828633
| 0.195228
| 0.079066
| 0.138365
| 0.177898
| 0.841869
| 0.802336
| 0.747978
| 0.735849
| 0.735849
| 0.704403
| 0
| 0.046753
| 0.198155
| 3,361
| 97
| 105
| 34.649485
| 0.779221
| 0.024398
| 0
| 0.638889
| 0
| 0
| 0.065729
| 0.011006
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.083333
| false
| 0.013889
| 0.083333
| 0
| 0.166667
| 0.013889
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b43554effe18cbe21174be91ad854cc0cfd5c7fc
| 1,588
|
py
|
Python
|
test/integration/connect/test_enter.py
|
chenrui333/cartridge-cli
|
98f361c2a44bc003ff2838fc9b20bd5fa2b4a876
|
[
"BSD-2-Clause"
] | null | null | null |
test/integration/connect/test_enter.py
|
chenrui333/cartridge-cli
|
98f361c2a44bc003ff2838fc9b20bd5fa2b4a876
|
[
"BSD-2-Clause"
] | null | null | null |
test/integration/connect/test_enter.py
|
chenrui333/cartridge-cli
|
98f361c2a44bc003ff2838fc9b20bd5fa2b4a876
|
[
"BSD-2-Clause"
] | null | null | null |
from integration.connect.utils import assert_successful_piped_commands
from integration.connect.utils import assert_exited_piped_commands
from integration.connect.utils import assert_session_push_commands
from integration.connect.utils import assert_error
def test_bad_instance_name(cartridge_cmd, project_with_instances):
    """`cartridge enter` reports an error for an instance that is not running."""
    project = project_with_instances.project
    cmd = [cartridge_cmd, 'enter', 'unknown-instance']
    assert_error(project, cmd, "Instance unknown-instance is not running")
def test_enter_piped(cartridge_cmd, project_with_instances):
    """Piped commands run successfully through `cartridge enter`."""
    project = project_with_instances.project
    router = project_with_instances.instances['router']
    cmd = [cartridge_cmd, 'enter', router.name]
    expected_connect = '%s.%s' % (project.name, router.name)
    assert_successful_piped_commands(project, cmd, exp_connect=expected_connect)
def test_instance_exited(cartridge_cmd, project_with_instances):
    """Piped commands against an exiting instance are handled by `cartridge enter`."""
    project = project_with_instances.project
    router = project_with_instances.instances['router']
    cmd = [cartridge_cmd, 'enter', router.name]
    expected_connect = '%s.%s' % (project.name, router.name)
    assert_exited_piped_commands(project, cmd, exp_connect=expected_connect)
def test_session_push(cartridge_cmd, project_with_instances):
    """Session push commands work inside an interactive `cartridge enter`."""
    project = project_with_instances.project
    router = project_with_instances.instances['router']
    enter_cmd = [cartridge_cmd, 'enter', router.name]
    # Console connects to "<app-name>.<instance-name>".
    expected_connect = '%s.%s' % (project.name, router.name)
    assert_session_push_commands(project, enter_cmd, exp_connect=expected_connect)
| 29.407407
| 101
| 0.75063
| 191
| 1,588
| 5.91623
| 0.162304
| 0.10708
| 0.19469
| 0.19115
| 0.810619
| 0.810619
| 0.776106
| 0.734513
| 0.642478
| 0.642478
| 0
| 0
| 0.157431
| 1,588
| 53
| 102
| 29.962264
| 0.844544
| 0
| 0
| 0.5
| 0
| 0
| 0.06864
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b454b0295ebcea227e05f3bfff403257dc5f4578
| 37,743
|
py
|
Python
|
tests/unit/operations/test_solution_stack_ops.py
|
sdolenc/aws-elastic-beanstalk-cli
|
4167a38bd599a4433c62c1d3516b8836248a4171
|
[
"Apache-2.0"
] | 110
|
2020-01-15T22:58:46.000Z
|
2022-03-27T20:47:33.000Z
|
tests/unit/operations/test_solution_stack_ops.py
|
QPC-database/aws-elastic-beanstalk-cli
|
87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7
|
[
"Apache-2.0"
] | 89
|
2020-01-15T23:18:34.000Z
|
2022-03-31T21:56:05.000Z
|
tests/unit/operations/test_solution_stack_ops.py
|
QPC-database/aws-elastic-beanstalk-cli
|
87ad9d8bbe5e4e7cb01b1bd4392eda33cb1943f7
|
[
"Apache-2.0"
] | 50
|
2020-01-15T22:58:53.000Z
|
2022-02-11T17:39:28.000Z
|
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
import yaml
import mock
import unittest
from ebcli.objects.solutionstack import SolutionStack
from ebcli.operations import solution_stack_ops
from ebcli.operations.platformops import PlatformVersion
class TestSolutionstackOps(unittest.TestCase):
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
@mock.patch('ebcli.operations.platform_version_ops.list_custom_platform_versions')
def test_find_solution_stack_from_string(
        self,
        custom_platforms_lister_mock,
        solution_stack_lister_mock
):
    """Smoke test: every input form below must resolve without raising.

    The inputs cover hyphenated shorthands (e.g. 'php-5.6'), language names
    (e.g. 'Python'), full solution stack names, and platform shorthands with
    versions (e.g. 'PHP 5.6'). No return value is asserted; an unresolvable
    string would raise and fail the test.
    """
    solution_strings = [
        'docker-1.11.2',
        'docker-1.12.6',
        'docker-1.6.2',
        'docker-1.7.1',
        'docker-1.9.1',
        'docker-17.03.1-ce',
        'glassfish-4.0-java-7-(preconfigured-docker)',
        'glassfish-4.1-java-8-(preconfigured-docker)',
        'go-1.3-(preconfigured-docker)',
        'go-1.4',
        'go-1.4-(preconfigured-docker)',
        'go-1.5',
        'go-1.6',
        'go-1.8',
        'iis-10.0',
        'iis-7.5',
        'iis-8',
        'iis-8.5',
        'java-7',
        'java-8',
        'multi-container-docker-1.11.2-(generic)',
        'multi-container-docker-1.6.2-(generic)',
        'multi-container-docker-1.9.1-(generic)',
        'multi-container-docker-17.03.1-ce-(generic)',
        'node.js',
        'packer-1.0.0',
        'packer-1.0.3',
        'php-5.3',
        'php-5.4',
        'php-5.5',
        'php-5.6',
        'php-7.0',
        'python',
        'python-2.7',
        'python-3.4',
        'python-3.4-(preconfigured-docker)',
        'ruby-1.9.3',
        'ruby-2.0-(passenger-standalone)',
        'ruby-2.0-(puma)',
        'ruby-2.1-(passenger-standalone)',
        'ruby-2.1-(puma)',
        'ruby-2.2-(passenger-standalone)',
        'ruby-2.2-(puma)',
        'ruby-2.3-(passenger-standalone)',
        'ruby-2.3-(puma)',
        'tomcat-6',
        'tomcat-7',
        'tomcat-7-java-6',
        'tomcat-7-java-7',
        'tomcat-8-java-8',
        'Node.js',
        'PHP',
        'Python',
        'Ruby',
        'Tomcat',
        'IIS',
        'Docker',
        'Multi-container Docker',
        'Glassfish',
        'Go',
        'Java',
        'Packer',
        '64bit Windows Server Core 2016 v1.2.0 running IIS 10.0',
        '64bit Windows Server 2016 v1.2.0 running IIS 10.0',
        '64bit Windows Server Core 2012 R2 v1.2.0 running IIS 8.5',
        '64bit Windows Server 2012 R2 v1.2.0 running IIS 8.5',
        '64bit Windows Server 2012 v1.2.0 running IIS 8',
        '64bit Windows Server 2008 R2 v1.2.0 running IIS 7.5',
        '64bit Amazon Linux 2017.03 v2.5.3 running Java 8',
        '64bit Amazon Linux 2017.03 v2.5.3 running Java 7',
        '64bit Amazon Linux 2017.03 v4.2.1 running Node.js',
        '64bit Amazon Linux 2017.03 v4.2.0 running Node.js',
        '64bit Amazon Linux 2017.03 v4.1.1 running Node.js',
        '64bit Amazon Linux 2015.09 v2.0.8 running Node.js',
        '64bit Amazon Linux 2015.03 v1.4.6 running Node.js',
        '64bit Amazon Linux 2014.03 v1.1.0 running Node.js',
        '32bit Amazon Linux 2014.03 v1.1.0 running Node.js',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.4',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.5',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.6',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 7.0',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.4',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.6',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 7.0',
        '64bit Amazon Linux 2017.03 v2.4.1 running PHP 5.6',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.4',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.5',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.6',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 7.0',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.6',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.5',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.4',
        '64bit Amazon Linux 2014.03 v1.1.0 running PHP 5.5',
        '64bit Amazon Linux 2014.03 v1.1.0 running PHP 5.4',
        '32bit Amazon Linux 2014.03 v1.1.0 running PHP 5.5',
        '32bit Amazon Linux 2014.03 v1.1.0 running PHP 5.4',
        '64bit Amazon Linux running PHP 5.3',
        '32bit Amazon Linux running PHP 5.3',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python 3.4',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python 2.7',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python 3.4',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python 2.7',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python',
        '64bit Amazon Linux 2014.03 v1.1.0 running Python 2.7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Python',
        '32bit Amazon Linux 2014.03 v1.1.0 running Python 2.7',
        '32bit Amazon Linux 2014.03 v1.1.0 running Python',
        '64bit Amazon Linux running Python',
        '32bit Amazon Linux running Python',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.3 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.2 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.3 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.2 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 1.9.3',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.2 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.2 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 1.9.3',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 1.9.3',
        '32bit Amazon Linux 2014.03 v1.1.0 running Ruby 1.9.3',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 7 Java 6',
        '64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 7 Java 6',
        '64bit Amazon Linux 2015.03 v1.4.4 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 6',
        '32bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 7',
        '32bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 6',
        '64bit Amazon Linux running Tomcat 7',
        '64bit Amazon Linux running Tomcat 6',
        '32bit Amazon Linux running Tomcat 7',
        '32bit Amazon Linux running Tomcat 6',
        '64bit Windows Server Core 2012 R2 running IIS 8.5',
        '64bit Windows Server 2012 R2 running IIS 8.5',
        '64bit Windows Server 2012 running IIS 8',
        '64bit Windows Server 2008 R2 running IIS 7.5',
        '64bit Amazon Linux 2017.03 v2.7.2 running Docker 17.03.1-ce',
        '64bit Amazon Linux 2017.03 v2.7.1 running Docker 17.03.1-ce',
        '64bit Amazon Linux 2017.03 v2.6.0 running Docker 1.12.6',
        '64bit Amazon Linux 2016.09 v2.3.0 running Docker 1.11.2',
        '64bit Amazon Linux 2016.03 v2.1.6 running Docker 1.11.2',
        '64bit Amazon Linux 2016.03 v2.1.0 running Docker 1.9.1',
        '64bit Amazon Linux 2015.09 v2.0.6 running Docker 1.7.1',
        '64bit Amazon Linux 2015.03 v1.4.6 running Docker 1.6.2',
        '64bit Amazon Linux 2017.03 v2.7.3 running Multi-container Docker 17.03.1-ce (Generic)',
        '64bit Amazon Linux 2016.03 v2.1.6 running Multi-container Docker 1.11.2 (Generic)',
        '64bit Amazon Linux 2016.03 v2.1.0 running Multi-container Docker 1.9.1 (Generic)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Multi-container Docker 1.6.2 (Generic)',
        '64bit Debian jessie v2.7.2 running GlassFish 4.1 Java 8 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running GlassFish 4.0 Java 7 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running GlassFish 4.1 Java 8 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running GlassFish 4.0 Java 7 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Go 1.4 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Go 1.3 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Go 1.4 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Go 1.3 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Python 3.4 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Python 3.4 (Preconfigured - Docker)',
        '64bit Amazon Linux 2017.03 v2.5.1 running Go 1.8',
        '64bit Amazon Linux 2016.09 v2.3.3 running Go 1.6',
        '64bit Amazon Linux 2016.09 v2.3.0 running Go 1.5',
        '64bit Amazon Linux 2016.03 v2.1.0 running Go 1.4',
        '64bit Amazon Linux 2017.03 v2.3.1 running Packer 1.0.3',
        '64bit Amazon Linux 2017.03 v2.2.2 running Packer 1.0.0',
        'Node.js',
        'PHP 5.6',
        'PHP 5.3',
        'Python 3.4',
        'Python',
        'Ruby 2.3 (Puma)',
        'Ruby 2.3 (Passenger Standalone)',
        'Tomcat 8 Java 8',
        'Tomcat 7',
        'IIS 8.5',
        'IIS 8.5',
        'IIS 8',
        'Docker 1.12.6',
        'Multi-container Docker 17.03.1-ce (Generic)',
        'Multi-container Docker 1.11.2 (Generic)',
        'GlassFish 4.1 Java 8 (Preconfigured - Docker)',
        'Go 1.4 (Preconfigured - Docker)',
        'Python 3.4 (Preconfigured - Docker)',
        'Java 8',
        'Java 7',
        'Go 1.8',
        'Go 1.6',
        'Go 1.5',
        'Go 1.4',
        'Packer 1.0.0',
    ]
    # Full catalog of solution stack names the mocked Beanstalk API returns.
    solution_stacks = [
        '64bit Windows Server Core 2016 v1.2.0 running IIS 10.0',
        '64bit Windows Server 2016 v1.2.0 running IIS 10.0',
        '64bit Windows Server Core 2012 R2 v1.2.0 running IIS 8.5',
        '64bit Windows Server 2012 R2 v1.2.0 running IIS 8.5',
        '64bit Windows Server 2012 v1.2.0 running IIS 8',
        '64bit Windows Server 2008 R2 v1.2.0 running IIS 7.5',
        '64bit Amazon Linux 2017.03 v2.5.3 running Java 8',
        '64bit Amazon Linux 2017.03 v2.5.3 running Java 7',
        '64bit Amazon Linux 2017.03 v4.2.1 running Node.js',
        '64bit Amazon Linux 2017.03 v4.2.0 running Node.js',
        '64bit Amazon Linux 2017.03 v4.1.1 running Node.js',
        '64bit Amazon Linux 2015.09 v2.0.8 running Node.js',
        '64bit Amazon Linux 2015.03 v1.4.6 running Node.js',
        '64bit Amazon Linux 2014.03 v1.1.0 running Node.js',
        '32bit Amazon Linux 2014.03 v1.1.0 running Node.js',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.4',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.5',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 5.6',
        '64bit Amazon Linux 2017.03 v2.4.3 running PHP 7.0',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.4',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 5.6',
        '64bit Amazon Linux 2017.03 v2.4.2 running PHP 7.0',
        '64bit Amazon Linux 2017.03 v2.4.1 running PHP 5.6',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.4',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.5',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 5.6',
        '64bit Amazon Linux 2016.03 v2.1.6 running PHP 7.0',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.6',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.5',
        '64bit Amazon Linux 2015.03 v1.4.6 running PHP 5.4',
        '64bit Amazon Linux 2014.03 v1.1.0 running PHP 5.5',
        '64bit Amazon Linux 2014.03 v1.1.0 running PHP 5.4',
        '32bit Amazon Linux 2014.03 v1.1.0 running PHP 5.5',
        '32bit Amazon Linux 2014.03 v1.1.0 running PHP 5.4',
        '64bit Amazon Linux running PHP 5.3',
        '32bit Amazon Linux running PHP 5.3',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python 3.4',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python',
        '64bit Amazon Linux 2017.03 v2.5.0 running Python 2.7',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python 3.4',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python 2.7',
        '64bit Amazon Linux 2015.03 v1.4.6 running Python',
        '64bit Amazon Linux 2014.03 v1.1.0 running Python 2.7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Python',
        '32bit Amazon Linux 2014.03 v1.1.0 running Python 2.7',
        '32bit Amazon Linux 2014.03 v1.1.0 running Python',
        '64bit Amazon Linux running Python',
        '32bit Amazon Linux running Python',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.3 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.2 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.3 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.2 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2017.03 v2.4.3 running Ruby 1.9.3',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.2 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.2 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Ruby 1.9.3',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.1 (Puma)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.1 (Passenger Standalone)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.0 (Puma)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 2.0 (Passenger Standalone)',
        '64bit Amazon Linux 2014.03 v1.1.0 running Ruby 1.9.3',
        '32bit Amazon Linux 2014.03 v1.1.0 running Ruby 1.9.3',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2017.03 v2.6.3 running Tomcat 7 Java 6',
        '64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 8 Java 8',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2015.03 v1.4.5 running Tomcat 7 Java 6',
        '64bit Amazon Linux 2015.03 v1.4.4 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 7',
        '64bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 6',
        '32bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 7',
        '32bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 6',
        '64bit Amazon Linux running Tomcat 7',
        '64bit Amazon Linux running Tomcat 6',
        '32bit Amazon Linux running Tomcat 7',
        '32bit Amazon Linux running Tomcat 6',
        '64bit Windows Server Core 2012 R2 running IIS 8.5',
        '64bit Windows Server 2012 R2 running IIS 8.5',
        '64bit Windows Server 2012 running IIS 8',
        '64bit Windows Server 2008 R2 running IIS 7.5',
        '64bit Amazon Linux 2017.03 v2.7.2 running Docker 17.03.1-ce',
        '64bit Amazon Linux 2017.03 v2.7.1 running Docker 17.03.1-ce',
        '64bit Amazon Linux 2017.03 v2.6.0 running Docker 1.12.6',
        '64bit Amazon Linux 2016.09 v2.3.0 running Docker 1.11.2',
        '64bit Amazon Linux 2016.03 v2.1.6 running Docker 1.11.2',
        '64bit Amazon Linux 2016.03 v2.1.0 running Docker 1.9.1',
        '64bit Amazon Linux 2015.09 v2.0.6 running Docker 1.7.1',
        '64bit Amazon Linux 2015.03 v1.4.6 running Docker 1.6.2',
        '64bit Amazon Linux 2017.03 v2.7.3 running Multi-container Docker 17.03.1-ce (Generic)',
        '64bit Amazon Linux 2016.03 v2.1.6 running Multi-container Docker 1.11.2 (Generic)',
        '64bit Amazon Linux 2016.03 v2.1.0 running Multi-container Docker 1.9.1 (Generic)',
        '64bit Amazon Linux 2015.03 v1.4.6 running Multi-container Docker 1.6.2 (Generic)',
        '64bit Debian jessie v2.7.2 running GlassFish 4.1 Java 8 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running GlassFish 4.0 Java 7 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running GlassFish 4.1 Java 8 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running GlassFish 4.0 Java 7 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Go 1.4 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Go 1.3 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Go 1.4 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Go 1.3 (Preconfigured - Docker)',
        '64bit Debian jessie v2.7.2 running Python 3.4 (Preconfigured - Docker)',
        '64bit Debian jessie v1.4.6 running Python 3.4 (Preconfigured - Docker)',
        '64bit Amazon Linux 2017.03 v2.5.1 running Go 1.8',
        '64bit Amazon Linux 2016.09 v2.3.3 running Go 1.6',
        '64bit Amazon Linux 2016.09 v2.3.0 running Go 1.5',
        '64bit Amazon Linux 2016.03 v2.1.0 running Go 1.4',
        '64bit Amazon Linux 2017.03 v2.3.1 running Packer 1.0.3',
        '64bit Amazon Linux 2017.03 v2.2.2 running Packer 1.0.0',
    ]
    # Wrap the raw names in SolutionStack objects, as the real lister would.
    solution_stacks = [SolutionStack(solution_stack) for solution_stack in solution_stacks]
    custom_platforms = [
        'arn:aws:elasticbeanstalk:us-west-2:12345678:platform/custom-platform-1/1.0.0',
    ]
    solution_stack_lister_mock.return_value = solution_stacks
    custom_platforms_lister_mock.return_value = custom_platforms
    for solution_string in solution_strings:
        # No assertion needed: an unresolvable string raises, failing the test.
        solution_stack_ops.find_solution_stack_from_string(solution_string)
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
@mock.patch('ebcli.operations.platform_version_ops.list_custom_platform_versions')
def test_find_solution_stack_from_string__custom_platform(
        self,
        custom_platforms_lister_mock,
        solution_stack_lister_mock
):
    """Both the full custom-platform ARN and its short name resolve to the
    same PlatformVersion when no solution stacks are available."""
    custom_platform_arn = (
        'arn:aws:elasticbeanstalk:us-west-2:12345678:platform/custom-platform-1/1.0.0'
    )
    solution_stack_lister_mock.return_value = []
    custom_platforms_lister_mock.return_value = [custom_platform_arn]

    for lookup_string in (custom_platform_arn, 'custom-platform-1'):
        self.assertEqual(
            PlatformVersion(custom_platform_arn),
            solution_stack_ops.find_solution_stack_from_string(lookup_string)
        )
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
@mock.patch('ebcli.operations.solution_stack_ops.platform_arn_to_solution_stack')
def test_find_solution_stack_from_string__eb_managed_platform(
        self,
        platform_arn_to_solution_stack_mock,
        solution_stack_lister_mock
):
    """An EB-managed platform ARN resolves to the solution stack returned
    by platform_arn_to_solution_stack."""
    stack_name = '64bit Amazon Linux 2017.09 v2.7.1 running Tomcat 8 Java 8'
    solution_stack_lister_mock.return_value = [stack_name]
    platform_arn_to_solution_stack_mock.return_value = SolutionStack(stack_name)

    resolved = solution_stack_ops.find_solution_stack_from_string(
        'arn:aws:elasticbeanstalk:us-west-2::platform/Tomcat 8 with Java 8 running on 64bit Amazon Linux/2.7.1'
    )
    self.assertEqual(SolutionStack(stack_name), resolved)
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
def test_find_solution_stack_from_string__retrieves_latest(self, solution_stacks_retriever_mock):
    """With find_newer=True, an older stack name resolves to the newest
    available version of the same platform."""
    newer_stack = SolutionStack('64bit Amazon Linux 2017.03 v4.2.1 running Node.js')
    older_stack = SolutionStack('64bit Amazon Linux 2017.03 v4.2.0 running Node.js')
    solution_stacks_retriever_mock.return_value = [newer_stack, older_stack]

    resolved = solution_stack_ops.find_solution_stack_from_string(
        '64bit Amazon Linux 2017.03 v4.2.0 running Node.js',
        find_newer=True
    )
    self.assertEqual(newer_stack, resolved)
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
def test_find_solution_stack_from_string__retrieves_latest_python_solution_Stack(self, solution_stacks_retriever_mock):
    """find_newer=True stays within the requested language version: asking
    for 'Python 2.7' yields the Python 2.7 stack even though a Python 3.6
    stack is also available."""
    python_27_stack = SolutionStack('64bit Amazon Linux 2014.09 v1.1.0 running Python 2.7')
    python_36_stack = SolutionStack('64bit Amazon Linux 2014.09 v1.1.0 running Python 3.6')
    solution_stacks_retriever_mock.return_value = [python_27_stack, python_36_stack]

    resolved = solution_stack_ops.find_solution_stack_from_string(
        'Python 2.7',
        find_newer=True
    )
    self.assertEqual(python_27_stack, resolved)
@mock.patch('ebcli.lib.elasticbeanstalk.get_available_solution_stacks')
@mock.patch('ebcli.operations.platform_version_ops.get_latest_custom_platform_version')
def test_find_solution_stack_from_string__return_latest_custom_platform(
        self,
        get_latest_custom_platform_version_mock,
        available_solution_stacks_mock
):
    """With find_newer=True, a custom platform ARN resolves to the latest
    version of that custom platform."""
    latest_custom_platform_arn = 'arn:aws:elasticbeanstalk:us-west-2:123123123:platform/custom-platform-2/1.0.3'
    available_solution_stacks_mock.return_value = []
    get_latest_custom_platform_version_mock.return_value = PlatformVersion(latest_custom_platform_arn)

    resolved = solution_stack_ops.find_solution_stack_from_string(
        latest_custom_platform_arn,
        find_newer=True
    )
    self.assertEqual(PlatformVersion(latest_custom_platform_arn), resolved)
def test_get_default_solution_stack(self):
    """get_default_solution_stack reads `global.default_platform` from
    .elasticbeanstalk/config.yml in the current working directory.

    Fixes over the original: the `try/finally` now also covers fixture
    setup, so the temp directory and cwd are restored even when setup
    fails; the redundant explicit close() inside the `with` block is gone.
    """
    ebcli_root = os.getcwd()
    test_dir = 'testDir'
    os.mkdir(test_dir)
    try:
        os.mkdir(os.path.join(test_dir, '.elasticbeanstalk'))
        os.chdir(test_dir)
        config_yml_contents = {
            'branch-defaults': {
                'default': {
                    'environment': 'default-environment'
                }
            },
            'global': {
                'application_name': 'default-application',
                'default_platform': 'Python 3.6'
            }
        }
        # `with` closes the file on exit; no explicit close() needed.
        with open(os.path.join('.elasticbeanstalk', 'config.yml'), 'w') as config_yml:
            yaml.dump(config_yml_contents, config_yml)
        self.assertEqual(
            'Python 3.6',
            solution_stack_ops.get_default_solution_stack()
        )
    finally:
        # Always restore cwd first so the relative rmtree path is valid.
        os.chdir(ebcli_root)
        shutil.rmtree(test_dir)
@mock.patch('ebcli.lib.utils.prompt_for_index_in_list')
def test_prompt_for_solution_stack_version(self, index_prompter_mock):
    """The PlatformShorthand of the entry the customer picks is returned."""
    matching_language_versions = [
        {
            'PlatformShorthand': 'Tomcat 8 Java 8',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.09 v2.7.0 running Tomcat 8 Java 8'
        },
        {
            'PlatformShorthand': 'Tomcat 7 Java 7',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.09 v2.7.0 running Tomcat 7 Java 7'
        },
        {
            'PlatformShorthand': 'Tomcat 7 Java 6',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 7 Java 6'
        }
    ]
    # Simulate the customer choosing the third entry (index 2).
    index_prompter_mock.return_value = 2

    chosen = solution_stack_ops.prompt_for_solution_stack_version(matching_language_versions)
    self.assertEqual('Tomcat 7 Java 6', chosen)
def test_resolve_language_version__exactly_one_version_found(self):
    """When exactly one platform version matches, its SolutionStack name is
    returned without prompting.

    Fix over the original: the stub for
    SolutionStack.group_solution_stacks_by_platform_shorthand is installed
    via mock.patch.object so the real class attribute is restored after the
    test instead of leaking the MagicMock into every later test.
    """
    matching_language_versions = [
        {
            'PlatformShorthand': 'Node.js',
            'LanguageName': 'Node.js',
            'SolutionStack': '64bit Amazon Linux 2017.09 v4.4.0 running Node.js'
        }
    ]
    with mock.patch.object(
            SolutionStack,
            'group_solution_stacks_by_platform_shorthand',
            mock.MagicMock(return_value=matching_language_versions)
    ):
        self.assertEqual(
            '64bit Amazon Linux 2017.09 v4.4.0 running Node.js',
            solution_stack_ops.resolve_language_version(
                'Node.js',
                [
                    mock.MagicMock('solution-stack-1'),
                    mock.MagicMock('solution-stack-2')
                ]
            )
        )
@mock.patch('ebcli.operations.solution_stack_ops.prompt_for_solution_stack_version')
def test_resolve_language_version__multiple_versions_found(
        self,
        solution_stack_prompter_mock
):
    """When several platform versions match, the customer is prompted and
    the SolutionStack of the chosen shorthand is returned.

    Fix over the original: the stub for
    SolutionStack.group_solution_stacks_by_platform_shorthand is installed
    via mock.patch.object so the real class attribute is restored after the
    test instead of leaking the MagicMock into every later test.
    """
    matching_language_versions = [
        {
            'PlatformShorthand': 'Tomcat 8 Java 8',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.09 v2.7.0 running Tomcat 8 Java 8'
        },
        {
            'PlatformShorthand': 'Tomcat 7 Java 7',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.09 v2.7.0 running Tomcat 7 Java 7'
        },
        {
            'PlatformShorthand': 'Tomcat 7 Java 6',
            'LanguageName': 'Tomcat',
            'SolutionStack': '64bit Amazon Linux 2017.03 v2.6.1 running Tomcat 7 Java 6'
        }
    ]
    # The customer "chooses" the first shorthand.
    solution_stack_prompter_mock.return_value = matching_language_versions[0]['PlatformShorthand']
    with mock.patch.object(
            SolutionStack,
            'group_solution_stacks_by_platform_shorthand',
            mock.MagicMock(return_value=matching_language_versions)
    ):
        self.assertEqual(
            '64bit Amazon Linux 2017.09 v2.7.0 running Tomcat 8 Java 8',
            solution_stack_ops.resolve_language_version(
                'Tomcat',
                [
                    mock.MagicMock('solution-stack-1'),
                    mock.MagicMock('solution-stack-2')
                ]
            )
        )
def test_platform_arn_to_solution_stack__custom_platform_arn(self):
    """A custom-platform ARN has no backing solution stack, so None is returned."""
    custom_arn = 'arn:aws:elasticbeanstalk:us-west-2:123123123:platform/custom-platform-test-test-4/1.0.0'
    self.assertIsNone(solution_stack_ops.platform_arn_to_solution_stack(custom_arn))
def test_platform_arn_to_solution_stack__preconfigured_solution_stack_arns(self):
    """Each EB-managed platform ARN maps to the SolutionStack named by the
    mocked describe_platform_version response.

    `platform_arns[i]` pairs with `platform_descriptions[i]`; the parallel
    lists are iterated with zip instead of the original index loop.
    """
    platform_arns = [
        'arn:aws:elasticbeanstalk:us-west-2::platform/Docker running on 64bit Amazon Linux/2.8.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Elastic Beanstalk Packer Builder/2.4.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Go 1 running on 64bit Amazon Linux/2.7.1',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 10.0 running on 64bit Windows Server 2016/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 10.0 running on 64bit Windows Server Core 2016/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 7.5 running on 64bit Windows Server 2008 R2/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 7.5 running on 64bit Windows Server 2008 R2/0.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8 running on 64bit Windows Server 2012/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8 running on 64bit Windows Server 2012/0.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8.5 running on 64bit Windows Server 2012 R2/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8.5 running on 64bit Windows Server 2012 R2/0.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8.5 running on 64bit Windows Server Core 2012 R2/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/IIS 8.5 running on 64bit Windows Server Core 2012 R2/0.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Java 8 running on 64bit Amazon Linux/2.6.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Multi-container Docker running on 64bit Amazon Linux/2.8.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Node.js running on 32bit Amazon Linux/1.2.1',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Node.js running on 32bit Amazon Linux 2014.03/1.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Passenger with Ruby 1.9.3 running on 32bit Amazon Linux/1.2.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Passenger with Ruby 1.9.3 running on 32bit Amazon Linux 2014.03/1.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Passenger with Ruby 2.4 running on 64bit Amazon Linux/2.6.1',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Passenger with Ruby 2.4 running on 64bit Amazon Linux/2.6.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.5.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Preconfigured Docker - GlassFish 4.0 with Java 7 running on 64bit Debian/2.8.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Preconfigured Docker - Python 3.4 running on 64bit Debian/2.8.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Puma with Ruby 2.4 running on 64bit Amazon Linux/2.6.1',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Python 2.7 running on 64bit Amazon Linux 2014.03/1.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Python 3.4 running on 64bit Amazon Linux/2.6.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Tomcat 7 with Java 7 running on 32bit Amazon Linux 2014.03/1.1.0',
        'arn:aws:elasticbeanstalk:us-west-2::platform/Tomcat 8 with Java 8 running on 64bit Amazon Linux/2.7.1',
    ]
    platform_descriptions = [
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.8.0 running Docker 17.06.2-ce'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.4.0 running Packer 1.0.3'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.7.1 running Go 1.9'
        },
        {
            'SolutionStackName': '64bit Windows Server 2016 v1.2.0 running IIS 10.0'
        },
        {
            'SolutionStackName': '64bit Windows Server Core 2016 v1.2.0 running IIS 10.0'
        },
        {
            'SolutionStackName': '64bit Windows Server 2008 R2 v1.2.0 running IIS 7.5'
        },
        {
            'SolutionStackName': '64bit Windows Server 2008 R2 running IIS 7.5'
        },
        {
            'SolutionStackName': '64bit Windows Server 2012 v1.2.0 running IIS 8'
        },
        {
            'SolutionStackName': '64bit Windows Server 2012 running IIS 8'
        },
        {
            'SolutionStackName': '64bit Windows Server 2012 R2 v1.2.0 running IIS 8.5'
        },
        {
            'SolutionStackName': '64bit Windows Server 2012 R2 running IIS 8.5'
        },
        {
            'SolutionStackName': '64bit Windows Server Core 2012 R2 v1.2.0 running IIS 8.5'
        },
        {
            'SolutionStackName': '64bit Windows Server Core 2012 R2 running IIS 8.5'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.6.0 running Java 8'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.8.0 running Multi-container Docker 17.06.2-ce (Generic)'
        },
        {
            'SolutionStackName': '32bit Amazon Linux 2014.09 v1.2.1 running Node.js'
        },
        {
            'SolutionStackName': '32bit Amazon Linux 2014.03 v1.1.0 running Node.js'
        },
        {
            'SolutionStackName': '32bit Amazon Linux 2014.09 v1.2.0 running Ruby 1.9.3'
        },
        {
            'SolutionStackName': '32bit Amazon Linux 2014.03 v1.1.0 running Ruby 1.9.3'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.6.1 running Ruby 2.4 (Passenger Standalone)'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.6.0 running Ruby 2.4 (Passenger Standalone)'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.03 v2.5.0 running PHP 7.1'
        },
        {
            'SolutionStackName': '64bit Debian jessie v2.8.0 running GlassFish 4.0 Java 7 (Preconfigured - Docker)'
        },
        {
            'SolutionStackName': '64bit Debian jessie v2.8.0 running Python 3.4 (Preconfigured - Docker)'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.6.1 running Ruby 2.4 (Puma)'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2014.03 v1.1.0 running Python 2.7'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.6.0 running Python 3.4'
        },
        {
            'SolutionStackName': '32bit Amazon Linux 2014.03 v1.1.0 running Tomcat 7 Java 7'
        },
        {
            'SolutionStackName': '64bit Amazon Linux 2017.09 v2.7.1 running Tomcat 8 Java 8'
        },
    ]
    # Iterate the parallel lists together rather than by index.
    for platform_arn, platform_description in zip(platform_arns, platform_descriptions):
        with mock.patch('ebcli.lib.elasticbeanstalk.describe_platform_version') as describe_platform_version_mock:
            describe_platform_version_mock.return_value = platform_description
            self.assertEqual(
                SolutionStack(platform_description['SolutionStackName']),
                solution_stack_ops.platform_arn_to_solution_stack(platform_arn)
            )
| 51.84478
| 138
| 0.599025
| 5,359
| 37,743
| 4.144057
| 0.040866
| 0.123334
| 0.154899
| 0.089157
| 0.880629
| 0.848613
| 0.82826
| 0.805746
| 0.76603
| 0.755358
| 0
| 0.147624
| 0.298069
| 37,743
| 727
| 139
| 51.916094
| 0.690635
| 0.014201
| 0
| 0.490313
| 0
| 0.067064
| 0.600393
| 0.086654
| 0
| 0
| 0
| 0
| 0.016393
| 1
| 0.017884
| false
| 0.043219
| 0.011923
| 0
| 0.031297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c332e729cff1eda0d553ac9db17444719c5e682e
| 88,228
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_task_work.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_task_work.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_task_work.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import weakref
from nfv_common.helpers import coroutine
from nfv_common import debug
from nfv_common import state_machine
from nfv_vim import nfvi
from nfv_vim.instance_fsm._instance_defs import INSTANCE_EVENT
# Module-level logger for instance task work state transitions.
DLOG = debug.debug_get_logger('nfv_vim.state_machine.instance_task_work')
# Reason string used when completing a task work result needs no explanation.
empty_reason = ''
class QueryHypervisorTaskWork(state_machine.StateTaskWork):
    """
    Query-Hypervisor Task Work

    Looks up the hypervisor hosting the instance and asks the NFVI layer
    for its current details, storing them on the owning task.
    """
    def __init__(self, task, instance, force_pass=False):
        super(QueryHypervisorTaskWork, self).__init__(
            'query-hypervisor_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI query-hypervisor response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Query-Hypervisor callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.nfvi_hypervisor = result['result-data']
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            # Treat the failure as success, but clear the hypervisor data.
            DLOG.info("Query-Hypervisor callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.nfvi_hypervisor = None
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run query-hypervisor

        Returns WAIT while the NFVI query is outstanding, or FAILED when
        no hypervisor is known for the instance's host.
        """
        from nfv_vim import tables

        DLOG.verbose("Query-Hypervisor for %s." % self._instance.name)
        hypervisor = tables.tables_get_hypervisor_table().get_by_host_name(
            self._instance.host_name)
        if hypervisor is None:
            return state_machine.STATE_TASK_WORK_RESULT.FAILED, empty_reason
        nfvi.nfvi_get_hypervisor(hypervisor.uuid, self._callback())
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class LiveMigrateTaskWork(state_machine.StateTaskWork):
    """
    Live-Migrate Task Work

    Issues a live-migration request for the instance, taking the target
    host and block-migration option from the action data when available.
    """
    def __init__(self, task, instance, force_pass=False):
        super(LiveMigrateTaskWork, self).__init__(
            'live-migrate-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI live-migrate response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Live-Migrate-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Live-Migrate-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run live-migrate instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.debug("Live-Migrate-Instance for %s." % self._instance.name)
        action_data = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
        nfvi_action_data = None
        if action_data is not None:
            nfvi_action_data = action_data.get_nfvi_action_data()
        if nfvi_action_data is None:
            # No action context available; issue a plain live-migrate.
            nfvi.nfvi_live_migrate_instance(self._instance.uuid,
                                            self._callback())
            return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason

        ctx = action_data.context
        params = nfvi_action_data.action_parameters
        if params is None:
            # Let nova decide whether to do a block migration when the VIM
            # is initiating the live migration.
            nfvi.nfvi_live_migrate_instance(
                self._instance.uuid, self._callback(),
                block_storage_migration='auto', context=ctx)
        else:
            target_host = params.get(
                nfvi.objects.v1.INSTANCE_LIVE_MIGRATE_OPTION.HOST)
            block_migration = params.get(
                nfvi.objects.v1.INSTANCE_LIVE_MIGRATE_OPTION.BLOCK_MIGRATION,
                False)
            nfvi.nfvi_live_migrate_instance(
                self._instance.uuid, self._callback(), target_host,
                block_migration, context=ctx)
        action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ColdMigrateTaskWork(state_machine.StateTaskWork):
    """
    Cold-Migrate Task Work

    Issues a cold-migration request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ColdMigrateTaskWork, self).__init__(
            'cold-migrate-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI cold-migrate response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Cold-Migrate-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Cold-Migrate-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run cold-migrate instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Cold-Migrate-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_cold_migrate_instance(self._instance.uuid, self._callback(),
                                        context=ctx)
        # Action data is looked up again after issuing the NFVI request
        # rather than reusing the earlier reference.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ColdMigrateConfirmTaskWork(state_machine.StateTaskWork):
    """
    Cold-Migrate-Confirm Task Work

    Confirms a previously issued cold migration via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ColdMigrateConfirmTaskWork, self).__init__(
            'cold-migrate-confirm-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI cold-migrate-confirm response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Cold-Migrate-Confirm-Instance callback for %s, "
                   "response=%s." % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Cold-Migrate-Confirm-Instance callback for %s, "
                      "failed, force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run cold-migrate-confirm instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Cold-Migrate-Confirm-Instance for %s."
                     % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_cold_migrate_confirm_instance(self._instance.uuid,
                                                self._callback(),
                                                context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ColdMigrateRevertTaskWork(state_machine.StateTaskWork):
    """
    Cold-Migrate-Revert Task Work

    Asks the NFVI layer to revert a cold migration, then waits for the
    instance to be reported on a host different from the one recorded at
    construction time before declaring success.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ColdMigrateRevertTaskWork, self).__init__(
            'cold-migrate-revert-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)
        # Host the instance is on at construction; handle_event compares
        # against it to detect the move back.
        self._from_host_name = instance.host_name
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for cold-migrate-revert instance

        On a completed response the task is NOT finished immediately:
        the timeout is extended and completion is signalled later from
        handle_event (unless the instance's cold-migrate wait is zero).
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Cold-Migrate-Revert-Instance callback for %s, "
                       "response=%s." % (self._instance.name, response))
            if response['completed']:
                # A cold-migrate revert causes a movement of the instance back
                # to the original host. Need to wait for this movement to
                # complete.
                if 0 == self._instance.max_cold_migrate_wait_in_secs:
                    DLOG.verbose("Cold-Migrate-Revert-Instance instance has a "
                                 "cold-migrate timeout of zero, not waiting.")
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    # Keep the task work alive long enough for the host
                    # change notification to arrive.
                    self.extend_timeout(
                        self._instance.max_cold_migrate_wait_in_secs)
            else:
                if self.force_pass:
                    DLOG.info("Cold-Migrate-Revert-Instance callback for %s, "
                              "failed, force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run cold-migrate-revert instance

        Always returns WAIT; completion happens via _callback or
        handle_event.
        """
        DLOG.verbose("Cold-Migrate-Revert-Instance for %s."
                     % self._instance.name)
        context = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                context = action_data.context
        nfvi.nfvi_cold_migrate_revert_instance(self._instance.uuid,
                                               self._callback(),
                                               context=context)
        # Action data is looked up again after issuing the NFVI request
        # rather than reusing the earlier reference.
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action proceed notifications

        Completes with SUCCESS once an NFVI_HOST_CHANGED event shows the
        instance on a host other than self._from_host_name.
        """
        handled = False
        if INSTANCE_EVENT.NFVI_HOST_CHANGED == event:
            if self._from_host_name != self._instance.host_name:
                DLOG.debug("Cold-Migrate-Revert-Instance for %s has moved from "
                           "host %s to host %s." % (self._instance.name,
                                                    self._from_host_name,
                                                    self._instance.host_name))
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
                handled = True
        return handled
class ResizeTaskWork(state_machine.StateTaskWork):
    """
    Resize Task Work

    Issues a resize request for the instance using the instance-type uuid
    carried in the action parameters; fails when that data is missing.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ResizeTaskWork, self).__init__(
            'resize-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI resize response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Resize-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Resize-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run resize instance

        Returns FAILED when the action data chain does not yield an
        instance-type uuid, otherwise WAIT.
        """
        failed = (state_machine.STATE_TASK_WORK_RESULT.FAILED, empty_reason)
        if self._instance.action_fsm is None:
            return failed
        action_data = self._instance.action_fsm_data
        if action_data is None:
            return failed
        ctx = action_data.context
        nfvi_action_data = action_data.get_nfvi_action_data()
        if nfvi_action_data is None:
            return failed
        params = nfvi_action_data.action_parameters
        if params is None:
            return failed
        flavor_uuid = params.get(
            nfvi.objects.v1.INSTANCE_RESIZE_OPTION.INSTANCE_TYPE_UUID, None)
        if flavor_uuid is None:
            return failed
        DLOG.verbose("Resize-Instance for %s, instance_type_uuid=%s."
                     % (self._instance.name, flavor_uuid))
        nfvi.nfvi_resize_instance(self._instance.uuid, flavor_uuid,
                                  self._callback(), context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ResizeConfirmTaskWork(state_machine.StateTaskWork):
    """
    Resize-Confirm Task Work

    Confirms a previously issued resize via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ResizeConfirmTaskWork, self).__init__(
            'resize-confirm-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI resize-confirm response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Resize-Confirm-Instance callback for %s, "
                   "response=%s." % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Resize-Confirm-Instance callback for %s, "
                      "failed, force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run resize-confirm instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Resize-Confirm-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_resize_confirm_instance(self._instance.uuid, self._callback(),
                                          context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ResizeRevertTaskWork(state_machine.StateTaskWork):
    """
    Resize-Revert Task Work

    Asks the NFVI layer to revert a resize, then waits for either a host
    change back to the original host or an explicit revert-completed
    event before declaring success.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ResizeRevertTaskWork, self).__init__(
            'resize-revert-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)
        # Host the instance is on at construction; handle_event compares
        # against it to detect a move back.
        self._from_host_name = instance.host_name
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for resize-revert instance

        On a completed response the task is NOT finished immediately:
        the timeout is extended and completion is signalled later from
        handle_event (unless the instance's resize wait is zero).
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Resize-Revert-Instance callback for %s, "
                       "response=%s." % (self._instance.name, response))
            if response['completed']:
                # A resize revert might cause a movement of the instance back
                # to the original host. Need to wait for this movement to
                # complete.
                if 0 == self._instance.max_resize_wait_in_secs:
                    DLOG.verbose("Resize-Revert-Instance instance has a "
                                 "timeout of zero, not waiting.")
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    # Keep the task work alive long enough for the host
                    # change or revert-completed notification to arrive.
                    self.extend_timeout(
                        self._instance.max_resize_wait_in_secs)
            else:
                if self.force_pass:
                    DLOG.info("Resize-Revert-Instance callback for %s, "
                              "failed, force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run resize-revert instance

        Always returns WAIT; completion happens via _callback or
        handle_event.
        """
        DLOG.verbose("Resize-Revert-Instance for %s." % self._instance.name)
        context = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                context = action_data.context
        nfvi.nfvi_resize_revert_instance(self._instance.uuid, self._callback(),
                                         context=context)
        # Action data is looked up again after issuing the NFVI request
        # rather than reusing the earlier reference.
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action proceed notifications

        Completes with SUCCESS on a host change away from
        self._from_host_name, or on an explicit RESIZE_REVERT_COMPLETED
        event.
        """
        handled = False
        if INSTANCE_EVENT.NFVI_HOST_CHANGED == event:
            if self._from_host_name != self._instance.host_name:
                DLOG.debug("Resize-Revert-Instance for %s has moved from "
                           "host %s to host %s." % (self._instance.name,
                                                    self._from_host_name,
                                                    self._instance.host_name))
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
                handled = True
        elif INSTANCE_EVENT.RESIZE_REVERT_COMPLETED == event:
            DLOG.debug("Resize-Revert-Instance for %s completed"
                       % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
            handled = True
        return handled
class EvacuateTaskWork(state_machine.StateTaskWork):
    """
    Evacuate Task Work

    Evacuates the instance off its current host, but only once that host
    is seen offline or powered off; otherwise the evacuate is deferred
    until a host-offline or audit event says it is safe.
    """
    def __init__(self, task, instance, force_pass=False):
        super(EvacuateTaskWork, self).__init__(
            'evacuate-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=120)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)
        # Guards against issuing the evacuate more than once when events
        # arrive after run() has already started it.
        self._evacuate_inprogress = False
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for evacuate instance
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Evacuate-Instance callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Evacuate-Instance callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def _do_evacuate(self):
        """
        Perform the evacuate

        Marks the evacuate in-progress and issues the NFVI request.
        """
        self._evacuate_inprogress = True
        context = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                context = action_data.context
        DLOG.debug("Evacuate-Instance for %s." % self._instance.name)
        nfvi.nfvi_evacuate_instance(self._instance.uuid, self._callback(),
                                    context=context)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
    def run(self):
        """
        Run evacuate instance

        Returns WAIT in all cases: either the evacuate has been issued,
        or it is deferred until handle_event sees the host go offline.
        """
        from nfv_vim import tables
        host_table = tables.tables_get_host_table()
        host = host_table.get(self._instance.host_name, None)
        if host is not None:
            if not (host.is_offline() or host.is_power_off()):
                # We must wait for the compute host to go offline or power off
                # before attempting to evacuate the instance. It is not safe to
                # evacuate an instance that may still be running.
                DLOG.debug("Evacuate-Instance for %s, but host %s is not "
                           "offline or power-off." %
                           (self._instance.name, host.name))
                return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
        self._do_evacuate()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action proceed notifications

        Triggers the deferred evacuate on a host-offline event, or on an
        audit event once the host is seen offline.
        NOTE(review): the audit path checks only is_offline(), while
        run() also accepts is_power_off() — confirm the asymmetry is
        intentional.
        """
        from nfv_vim import tables
        handled = False
        if not self._evacuate_inprogress:
            if INSTANCE_EVENT.NFVI_HOST_OFFLINE == event:
                self._do_evacuate()
                handled = True
            elif INSTANCE_EVENT.AUDIT == event:
                host_table = tables.tables_get_host_table()
                host = host_table.get(self._instance.host_name, None)
                if host is not None:
                    if host.is_offline():
                        self._do_evacuate()
                        handled = True
        return handled
class StartTaskWork(state_machine.StateTaskWork):
    """
    Start Task Work

    Issues a start request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(StartTaskWork, self).__init__(
            'start-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI start-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Start-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Start-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run start instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Start-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_start_instance(self._instance.uuid, self._callback(),
                                 context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class StopTaskWork(state_machine.StateTaskWork):
    """
    Stop Task Work

    Issues a stop request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(StopTaskWork, self).__init__(
            'stop-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI stop-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Stop-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Stop-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run stop instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Stop-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_stop_instance(self._instance.uuid, self._callback(),
                                context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class PauseTaskWork(state_machine.StateTaskWork):
    """
    Pause Task Work

    Issues a pause request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(PauseTaskWork, self).__init__(
            'pause-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI pause-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Pause-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Pause-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run pause instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Pause-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_pause_instance(self._instance.uuid, self._callback(),
                                 context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class UnpauseTaskWork(state_machine.StateTaskWork):
    """
    Unpause Task Work

    Issues an unpause request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(UnpauseTaskWork, self).__init__(
            'unpause-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI unpause-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Unpause-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Unpause-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run unpause instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Unpause-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_unpause_instance(self._instance.uuid, self._callback(),
                                   context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class SuspendTaskWork(state_machine.StateTaskWork):
    """
    Suspend Task Work

    Issues a suspend request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(SuspendTaskWork, self).__init__(
            'suspend-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI suspend-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Suspend-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Suspend-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run suspend instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Suspend-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_suspend_instance(self._instance.uuid, self._callback(),
                                   context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class ResumeTaskWork(state_machine.StateTaskWork):
    """
    Resume Task Work

    Issues a resume request for the instance via the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(ResumeTaskWork, self).__init__(
            'resume-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI resume-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Resume-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Resume-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run resume instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Resume-Instance for %s." % self._instance.name)
        ctx = None
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                ctx = data.context
        nfvi.nfvi_resume_instance(self._instance.uuid, self._callback(),
                                  context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class RebootTaskWork(state_machine.StateTaskWork):
    """
    Reboot Task Work

    Issues a reboot request for the instance, taking the
    graceful-shutdown option from the action parameters when available.
    """
    def __init__(self, task, instance, force_pass=False):
        super(RebootTaskWork, self).__init__(
            'reboot-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for reboot instance
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Reboot-Instance callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Reboot-Instance callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run reboot instance

        Always returns WAIT; completion is signalled through _callback.
        """
        action_data = None
        nfvi_action_data = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
        if action_data is not None:
            nfvi_action_data = action_data.get_nfvi_action_data()
        if action_data is None or nfvi_action_data is None:
            DLOG.verbose("Reboot-Instance for %s, graceful_shutdown=False"
                         % self._instance.name)
            # NOTE(review): this call passes the callback second and the
            # graceful-shutdown flag third, while the call below passes
            # graceful_shutdown second and the callback third. One of the
            # two orderings is almost certainly wrong — confirm against
            # the nfvi_reboot_instance signature.
            nfvi.nfvi_reboot_instance(self._instance.uuid, self._callback(),
                                      False)
            return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
        context = action_data.context
        action_parameters = nfvi_action_data.action_parameters
        if action_parameters is not None:
            graceful_shutdown = action_parameters.get(
                nfvi.objects.v1.INSTANCE_REBOOT_OPTION.GRACEFUL_SHUTDOWN,
                False)
        else:
            graceful_shutdown = False
        DLOG.verbose("Reboot-Instance for %s, graceful_shutdown=%s"
                     % (self._instance.name, graceful_shutdown))
        # NOTE(review): argument order differs from the call above — see note.
        nfvi.nfvi_reboot_instance(self._instance.uuid, graceful_shutdown,
                                  self._callback(), context=context)
        action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class RebuildTaskWork(state_machine.StateTaskWork):
    """
    Rebuild Task Work

    Issues a rebuild request for the instance; the image uuid and the
    name used for the rebuild default to the instance's own values but
    may be overridden by action parameters.
    """
    def __init__(self, task, instance, force_pass=False):
        super(RebuildTaskWork, self).__init__(
            'rebuild-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference so this task work does not keep the instance alive.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """The instance this task work operates on."""
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """Receive the NFVI rebuild-instance response."""
        result = (yield)
        if self.task is None:
            return
        DLOG.debug("Rebuild-Instance callback for %s, response=%s."
                   % (self._instance.name, result))
        if result['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        elif self.force_pass:
            DLOG.info("Rebuild-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                result['reason'])

    def run(self):
        """
        Run rebuild instance

        Always returns WAIT; completion is signalled through _callback.
        """
        DLOG.verbose("Rebuild-Instance for %s, image_uuid=%s"
                     % (self._instance.name, self._instance.image_uuid))
        ctx = None
        params = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                ctx = action_data.context
                nfvi_action_data = action_data.get_nfvi_action_data()
                if nfvi_action_data is not None:
                    params = nfvi_action_data.action_parameters
        # Defaults come from the instance itself; action parameters may
        # override the image and the name used for the rebuild.
        image_uuid = self._instance.image_uuid
        instance_name = self._instance.name
        if params is not None:
            image_uuid = params.get(
                nfvi.objects.v1.INSTANCE_REBUILD_OPTION.INSTANCE_IMAGE_UUID,
                self._instance.image_uuid)
            instance_name = params.get(
                nfvi.objects.v1.INSTANCE_REBUILD_OPTION.INSTANCE_NAME,
                self._instance.name)
        nfvi.nfvi_rebuild_instance(self._instance.uuid, instance_name,
                                   image_uuid, self._callback(), context=ctx)
        # Action data is looked up again after issuing the NFVI request.
        if self._instance.action_fsm is not None:
            data = self._instance.action_fsm_data
            if data is not None:
                data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class FailTaskWork(state_machine.StateTaskWork):
    """
    Fail Task Work

    Marks an instance as failed through the NFVI layer.
    """
    def __init__(self, task, instance, force_pass=False):
        super(FailTaskWork, self).__init__(
            'fail-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Hold the instance weakly to avoid a reference cycle.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """
        Returns the instance
        """
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """
        Callback for fail instance
        """
        response = (yield)
        if self.task is None:
            return
        DLOG.debug("Fail-Instance callback for %s, response=%s."
                   % (self._instance.name, response))
        if response['completed']:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        elif self.force_pass:
            DLOG.info("Fail-Instance callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                response['reason'])

    def run(self):
        """
        Run fail instance
        """
        DLOG.verbose("Fail-Instance for %s." % self._instance.name)
        context = None
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                context = action_data.context

        nfvi.nfvi_fail_instance(self._instance.uuid, self._callback(),
                                context=context)

        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class DeleteTaskWork(state_machine.StateTaskWork):
    """
    Delete Task Work

    Deletes an instance through the NFVI layer.  Currently a no-op: run()
    reports immediate success (see the note in run about the MANO APIs).
    """
    def __init__(self, task, instance, force_pass=False):
        super(DeleteTaskWork, self).__init__(
            'delete-instance_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
    @property
    def _instance(self):
        """
        Returns the instance

        May return None once the instance has been garbage collected.
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for delete instance

        Completes the task work with SUCCESS, SUCCESS (force-pass on
        failure) or FAILED with the reason from the NFVI response.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Delete-Instance callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Delete-Instance callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run delete instance

        Intentionally returns SUCCESS without doing anything; the real
        delete flow is kept below, disabled, for when the MANO APIs are
        adopted.
        """
        # Disable for now until the MANO APIs are used.
        return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        # if not (self._instance.is_deleting() or
        #         self._instance.nfvi_instance_is_deleted()):
        #     DLOG.verbose("Delete-Instance for %s." % self._instance.name)
        #
        #     context = None
        #     if self._instance.action_fsm is not None:
        #         action_data = self._instance.action_fsm_data
        #         if action_data is not None:
        #             context = action_data.context
        #
        #     nfvi.nfvi_delete_instance(self._instance.uuid, self._callback(),
        #                               context=context)
        #
        #     if self._instance.action_fsm is not None:
        #         action_data = self._instance.action_fsm_data
        #         if action_data is not None:
        #             action_data.set_action_initiated()
        #     return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
        #
        # return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
class GuestServicesCreateTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Create Task Work

    Creates the provisioned guest services for an instance via the NFVI
    layer; a no-op success when no guest services are provisioned.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesCreateTaskWork, self).__init__(
            'guest-services-create_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Hold the instance weakly to avoid a reference cycle.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """
        Returns the instance
        """
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Create
        """
        response = (yield)
        if self.task is None:
            return
        DLOG.debug("Guest-Services-Create callback for %s, response=%s."
                   % (self._instance.name, response))
        if response['completed']:
            # Record the creation on the instance before completing.
            self._instance.guest_services_created()
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        elif self.force_pass:
            DLOG.info("Guest-Services-Create callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                response['reason'])

    def run(self):
        """
        Run Guest-Services-Create
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            # Nothing to create for this instance.
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason

        nfvi_guest_service_names = guest_services.get_nfvi_guest_service_names()
        DLOG.verbose("Guest-Services-Create for %s, nfvi_guest_services=%s."
                     % (self._instance.name, nfvi_guest_service_names))
        nfvi.nfvi_guest_services_create(self._instance.uuid,
                                        self._instance.host_name,
                                        nfvi_guest_service_names,
                                        self._callback())

        action_data = (self._instance.action_fsm_data
                       if self._instance.action_fsm is not None else None)
        if action_data is not None:
            action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesEnableTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Enable Task Work

    Enables the provisioned guest services for an instance via the NFVI
    layer; a no-op success when no guest services are provisioned.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesEnableTaskWork, self).__init__(
            'guest-services-enable_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Enable

        On success, refreshes the instance's guest-services state from the
        response's result-data before completing the task work.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Enable callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                result_data = response.get('result-data', None)
                if result_data is not None:
                    host_name = result_data.get('host_name', None)
                    nfvi_guest_services = result_data.get('services', list())
                    self._instance.nfvi_guest_services_update(
                        nfvi_guest_services, host_name)
                # Re-check the task: the update above may have side effects
                # that clear it before we report completion.
                if self.task is not None:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Enable callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Enable instance

        Marks the services as enabling, then issues the NFVI set request;
        completion is reported via _callback.
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        self._instance.guest_services_enabling()
        nfvi_guest_services = guest_services.get_nfvi_guest_services()
        DLOG.debug("Guest-Services-Enable for %s, nfvi_guest_services=%s."
                   % (self._instance.name, nfvi_guest_services))
        nfvi.nfvi_guest_services_set(self._instance.uuid,
                                     self._instance.host_name,
                                     nfvi_guest_services,
                                     self._callback())
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesDisableTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Disable Task Work

    Disables the provisioned guest services for an instance via the NFVI
    layer; a no-op success when no guest services are provisioned.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesDisableTaskWork, self).__init__(
            'guest-services-disable_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Hold the instance weakly to avoid a reference cycle.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """
        Returns the instance
        """
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Disable
        """
        response = (yield)
        if self.task is None:
            return
        DLOG.debug("Guest-Services-Disable callback for %s, response=%s."
                   % (self._instance.name, response))
        if response['completed']:
            result_data = response.get('result-data', None)
            if result_data is not None:
                # Refresh the instance's guest-services view from the
                # response before completing.
                self._instance.nfvi_guest_services_update(
                    result_data.get('services', list()),
                    result_data.get('host_name', None))
            # Re-check the task; the update above may have cleared it.
            if self.task is not None:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                    empty_reason)
        elif self.force_pass:
            DLOG.info("Guest-Services-Disable callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                response['reason'])

    def run(self):
        """
        Run Guest-Services-Disable instance
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason

        self._instance.guest_services_disabling()
        nfvi_guest_services = guest_services.get_nfvi_guest_services()
        DLOG.debug("Guest-Services-Disable for %s, nfvi_guest_services=%s."
                   % (self._instance.name, nfvi_guest_services))
        nfvi.nfvi_guest_services_set(self._instance.uuid,
                                     self._instance.host_name,
                                     nfvi_guest_services,
                                     self._callback())

        action_data = (self._instance.action_fsm_data
                       if self._instance.action_fsm is not None else None)
        if action_data is not None:
            action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesSetTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Set Task Work

    Pushes the instance's current guest-services configuration to the NFVI
    layer; a no-op success when no guest services are provisioned.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesSetTaskWork, self).__init__(
            'guest-services-set_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Set

        On success, refreshes the instance's guest-services state from the
        response's result-data before completing the task work.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Set callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                result_data = response.get('result-data', None)
                if result_data is not None:
                    host_name = result_data.get('host_name', None)
                    nfvi_guest_services = result_data.get('services', list())
                    self._instance.nfvi_guest_services_update(
                        nfvi_guest_services, host_name)
                # Re-check the task: the update above may have side effects
                # that clear it before we report completion.
                if self.task is not None:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Set callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Set instance

        Issues the NFVI set request; completion is reported via _callback.
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        nfvi_guest_services = guest_services.get_nfvi_guest_services()
        DLOG.verbose("Guest-Services-Set for %s, nfvi_guest_services=%s."
                     % (self._instance.name, nfvi_guest_services))
        nfvi.nfvi_guest_services_set(self._instance.uuid,
                                     self._instance.host_name,
                                     nfvi_guest_services,
                                     self._callback())
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesQueryTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Query Task Work

    Queries the guest-services state of an instance from the NFVI layer and
    refreshes the instance with the result.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesQueryTaskWork, self).__init__(
            'guest-services-query_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Query

        On success, refreshes the instance's guest-services state from the
        response's result-data before completing the task work.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Query callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                result_data = response.get('result-data', None)
                if result_data is not None:
                    host_name = result_data.get('host_name', None)
                    nfvi_guest_services = result_data.get('services', list())
                    self._instance.nfvi_guest_services_update(
                        nfvi_guest_services, host_name)
                # Re-check the task: the update above may have side effects
                # that clear it before we report completion.
                if self.task is not None:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Query callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Query instance
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        # The services are fetched here only for the log line below; the
        # query call itself takes just the instance uuid and the callback.
        nfvi_guest_services = guest_services.get_nfvi_guest_services()
        DLOG.verbose("Guest-Services-Query for %s, nfvi_guest_services=%s."
                     % (self._instance.name, nfvi_guest_services))
        nfvi.nfvi_guest_services_query(self._instance.uuid, self._callback())
        if self._instance.action_fsm is not None:
            action_data = self._instance.action_fsm_data
            if action_data is not None:
                action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesDeleteTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Delete Task Work

    Deletes the guest services of an instance via the NFVI layer; a no-op
    success when no guest services are provisioned.
    """
    def __init__(self, task, instance, force_pass=False):
        super(GuestServicesDeleteTaskWork, self).__init__(
            'guest-services-delete_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=60)
        # Hold the instance weakly to avoid a reference cycle.
        self._instance_reference = weakref.ref(instance)

    @property
    def _instance(self):
        """
        Returns the instance
        """
        return self._instance_reference()

    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Delete
        """
        response = (yield)
        if self.task is None:
            return
        DLOG.debug("Guest-Services-Delete callback for %s, response=%s."
                   % (self._instance.name, response))
        if response['completed']:
            # Record the deletion on the instance before completing.
            self._instance.guest_services_deleted()
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        elif self.force_pass:
            DLOG.info("Guest-Services-Delete callback for %s, failed, "
                      "force-passing." % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
        else:
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED,
                response['reason'])

    def run(self):
        """
        Run Guest-Services-Delete instance
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason

        DLOG.debug("Guest-Services-Delete for %s." % self._instance.name)
        nfvi.nfvi_guest_services_delete(self._instance.uuid, self._callback())

        action_data = (self._instance.action_fsm_data
                       if self._instance.action_fsm is not None else None)
        if action_data is not None:
            action_data.set_action_initiated()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
class GuestServicesVoteTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Vote Task Work

    Asks the guest to vote on an action (self._action_type).  The vote
    result arrives either through the NFVI callback (which can extend the
    timeout) or through GUEST_ACTION_ALLOW / GUEST_ACTION_REJECT events
    handled in handle_event.
    """
    def __init__(self, task, instance, action_type, force_pass=False):
        super(GuestServicesVoteTaskWork, self).__init__(
            'guest-services-vote_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=120)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
        self._action_type = action_type
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    def timeout(self):
        """
        Handle task work timeout

        A timed-out vote passes when force_pass is set, otherwise it is
        reported as TIMED_OUT.
        """
        if self.force_pass:
            DLOG.info("Guest-Services-Vote timeout for %s, force-passing."
                      % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        else:
            DLOG.info("Guest-Services-Vote timeout for %s." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.TIMED_OUT, empty_reason
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Vote

        A completed response with a non-zero timeout means the guest will
        answer later: the task-work timeout is extended and the vote result
        is delivered via handle_event.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Vote callback for %s, response=%s."
                       % (self._instance.name, response))
            if response['completed']:
                if 0 == response['timeout']:
                    DLOG.verbose("Guest-Services-Vote callback has a timeout "
                                 "of zero, not waiting for vote response.")
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    # Wait for the guest's vote; extend by the guest-supplied
                    # timeout.
                    self.extend_timeout(response['timeout'])
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Vote callback for %s, failed, "
                              "force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Vote instance

        The vote is skipped (immediate SUCCESS) when the instance is locked
        or disabled, when guest services are not provisioned, when no action
        data is available, when the action requests skip_guest_vote, or when
        the host is being force-locked.
        """
        from nfv_vim import tables
        if self._instance.is_locked():
            DLOG.verbose("Guest-Services-Vote for %s, skipping "
                         "instance is locked." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if self._instance.is_disabled():
            DLOG.verbose("Guest-Services-Vote for %s, skipping "
                         "instance is disabled." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        action_data = self._instance.action_fsm_data
        if action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        nfvi_action_data = action_data.get_nfvi_action_data()
        if nfvi_action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if nfvi_action_data.skip_guest_vote:
            DLOG.verbose("Guest-Services-Vote for %s, skipping "
                         "guest vote as requested." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        host_table = tables.tables_get_host_table()
        host = host_table.get(self._instance.host_name, None)
        if host is not None:
            if host.is_force_lock():
                DLOG.verbose("Guest-Services-Vote for %s, skipping "
                             "guest vote, host %s is force locking."
                             % (self._instance.name, host.name))
                return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        DLOG.debug("Guest-Services-Vote for %s, action_type=%s."
                   % (self._instance.name, self._action_type))
        nfvi.nfvi_guest_services_vote(self._instance.uuid,
                                      self._instance.host_name,
                                      self._action_type,
                                      self._callback())
        action_data.set_action_voting()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action allow & reject notifications

        GUEST_ACTION_ALLOW completes the task work with SUCCESS and
        GUEST_ACTION_REJECT with FAILED; returns True when the event was
        consumed.
        """
        handled = False
        if INSTANCE_EVENT.GUEST_ACTION_ALLOW == event:
            DLOG.debug("Guest-Services-Vote for %s, vote=allow."
                       % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
            handled = True
        elif INSTANCE_EVENT.GUEST_ACTION_REJECT == event:
            DLOG.debug("Guest-Services-Vote for %s, vote=reject."
                       % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.FAILED, empty_reason)
            handled = True
        return handled
class GuestServicesPreNotifyTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Pre-Notify Task Work

    Notifies the guest before an action (self._action_type) is carried out.
    The guest's acknowledgement arrives either through the NFVI callback
    (which can extend the timeout) or through a GUEST_ACTION_PROCEED event
    handled in handle_event.
    """
    def __init__(self, task, instance, action_type, force_pass=False):
        super(GuestServicesPreNotifyTaskWork, self).__init__(
            'guest-services-pre-notify_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=120)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
        self._action_type = action_type
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    def timeout(self):
        """
        Handle task work timeout

        A timed-out pre-notify passes when force_pass is set, otherwise it
        is reported as TIMED_OUT.
        """
        if self.force_pass:
            DLOG.info("Guest-Services-Pre-Notify timeout for %s, "
                      "force-passing." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        else:
            DLOG.info("Guest-Services-Pre-Notify timeout for %s."
                      % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.TIMED_OUT, empty_reason
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Pre-Notify

        A completed response with a non-zero timeout means the guest will
        answer later: the task-work timeout is extended and the proceed
        notification is delivered via handle_event.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Pre-Notify callback for %s, "
                       "response=%s." % (self._instance.name, response))
            if response['completed']:
                if 0 == response['timeout']:
                    DLOG.verbose("Guest-Services-Pre-Notify callback has a "
                                 "timeout of zero, not waiting for notify "
                                 "response.")
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    # Wait for the guest's response; extend by the
                    # guest-supplied timeout.
                    self.extend_timeout(response['timeout'])
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Pre-Notify callback for %s, "
                              "failed, force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Pre-Notify instance

        The notification is skipped (immediate SUCCESS) when the instance is
        locked or disabled, when guest services are not provisioned, when no
        action data is available, or when the action requests
        skip_guest_notify.
        """
        if self._instance.is_locked():
            DLOG.verbose("Guest-Services-Pre-Notify for %s, skipping "
                         "instance is locked." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if self._instance.is_disabled():
            DLOG.verbose("Guest-Services-Pre-Notify for %s, skipping "
                         "instance is disabled." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        action_data = self._instance.action_fsm_data
        if action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        nfvi_action_data = action_data.get_nfvi_action_data()
        if nfvi_action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if nfvi_action_data.skip_guest_notify:
            DLOG.verbose("Guest-Services-Pre-Notify for %s, skipping "
                         "guest notify as requested." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        DLOG.debug("Guest-Services-Pre-Notify for %s, action_type=%s."
                   % (self._instance.name, self._action_type))
        # The True flag selects the pre-notification variant — the
        # Post-Notify task work passes False here.
        nfvi.nfvi_guest_services_notify(self._instance.uuid,
                                        self._instance.host_name,
                                        self._action_type,
                                        True, self._callback())
        action_data.set_action_pre_notify()
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action proceed notifications

        GUEST_ACTION_PROCEED completes the task work with SUCCESS; returns
        True when the event was consumed.
        """
        handled = False
        if INSTANCE_EVENT.GUEST_ACTION_PROCEED == event:
            DLOG.debug("Guest-Services-Pre-Notify for %s, notify=proceed."
                       % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
            handled = True
        return handled
class GuestServicesPostNotifyTaskWork(state_machine.StateTaskWork):
    """
    Guest-Services-Post-Notify Task Work

    Notifies the guest after an action (self._action_type) has completed.
    If guest communication is not yet re-established when run() executes,
    the notify is deferred to the GUEST_COMMUNICATION_ESTABLISHED event in
    handle_event.
    """
    def __init__(self, task, instance, action_type, force_pass=False):
        super(GuestServicesPostNotifyTaskWork, self).__init__(
            'guest-services-post-notify_%s' % instance.name, task,
            force_pass=force_pass, timeout_in_secs=120)
        # Weak reference avoids a reference cycle with the owning instance.
        self._instance_reference = weakref.ref(instance)
        self._action_type = action_type
    @property
    def _instance(self):
        """
        Returns the instance
        """
        instance = self._instance_reference()
        return instance
    def timeout(self):
        """
        Handle task work timeout

        A timed-out post-notify passes when force_pass is set, otherwise it
        is reported as TIMED_OUT.
        """
        if self.force_pass:
            DLOG.info("Guest-Services-Post-Notify timeout for %s, "
                      "force-passing." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        else:
            DLOG.info("Guest-Services-Post-Notify timeout for %s."
                      % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.TIMED_OUT, empty_reason
    @coroutine
    def _callback(self):
        """
        Callback for Guest-Services-Post-Notify

        A completed response with a non-zero timeout means the guest will
        answer later: the task-work timeout is extended and the proceed
        notification is delivered via handle_event.
        """
        response = (yield)
        if self.task is not None:
            DLOG.debug("Guest-Services-Post-Notify callback for %s, "
                       "response=%s." % (self._instance.name, response))
            if response['completed']:
                if 0 == response['timeout']:
                    DLOG.debug("Guest-Services-Post-Notify callback has a "
                               "timeout of zero, not waiting for notify "
                               "response.")
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    # Wait for the guest's response; extend by the
                    # guest-supplied timeout.
                    self.extend_timeout(response['timeout'])
            else:
                if self.force_pass:
                    DLOG.info("Guest-Services-Post-Notify callback for %s, "
                              "failed, force-passing." % self._instance.name)
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.SUCCESS,
                        empty_reason)
                else:
                    self.task.task_work_complete(
                        state_machine.STATE_TASK_WORK_RESULT.FAILED,
                        response['reason'])
    def run(self):
        """
        Run Guest-Services-Post-Notify instance

        Skipped (immediate SUCCESS) when guest services are not
        provisioned, the instance is locked, no action data is available,
        or the action requests skip_guest_notify.  The notify is only sent
        if guest communication is already established; otherwise it is sent
        later from handle_event when GUEST_COMMUNICATION_ESTABLISHED fires.
        """
        guest_services = self._instance.guest_services
        if not guest_services.are_provisioned():
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if self._instance.is_locked():
            DLOG.verbose("Guest-Services-Post-Notify for %s, skipping "
                         "instance is locked." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        action_data = self._instance.action_fsm_data
        if action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        nfvi_action_data = action_data.get_nfvi_action_data()
        if nfvi_action_data is None:
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        if nfvi_action_data.skip_guest_notify:
            DLOG.verbose("Guest-Services-Post-Notify for %s, skipping "
                         "guest notify as requested." % self._instance.name)
            return state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason
        DLOG.verbose("Guest-Services-Post-Notify for %s running, "
                     "action_type=%s." % (self._instance.name,
                                          self._action_type))
        if guest_services.guest_communication_established():
            # this log is needed for nfv_scenario_tests
            DLOG.debug("Guest-Services-Post-Notify for %s, guest "
                       "communication re-established." % self._instance.name)
            DLOG.debug("Guest-Services-Post-Notify for %s, action_type=%s."
                       % (self._instance.name, self._action_type))
            nfvi.nfvi_guest_services_notify(self._instance.uuid,
                                            self._instance.host_name,
                                            self._action_type,
                                            False, self._callback())
            if self._instance.action_fsm is not None:
                action_data = self._instance.action_fsm_data
                if action_data is not None:
                    action_data.set_action_post_notify()
        # Allow time for guest communication to (re-)establish before the
        # task work times out.
        self.extend_timeout(guest_services.communication_establish_timeout)
        return state_machine.STATE_TASK_WORK_RESULT.WAIT, empty_reason
    def handle_event(self, event, event_data=None):
        """
        Handle instance action proceed notifications

        GUEST_COMMUNICATION_ESTABLISHED triggers the deferred notify (or
        immediate SUCCESS if the preconditions no longer hold);
        GUEST_ACTION_PROCEED completes with SUCCESS; AUDIT completes with
        SUCCESS when the instance is locked.  Returns True when the event
        was consumed.
        """
        handled = False
        if INSTANCE_EVENT.GUEST_COMMUNICATION_ESTABLISHED == event:
            DLOG.debug("Guest-Services-Post-Notify for %s, guest "
                       "communication re-established." % self._instance.name)
            guest_services = self._instance.guest_services
            if not guest_services.are_provisioned():
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
                return True
            action_data = self._instance.action_fsm_data
            if action_data is None:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
                return True
            nfvi_action_data = action_data.get_nfvi_action_data()
            if nfvi_action_data is None:
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
                return True
            DLOG.debug("Guest-Services-Post-Notify for %s, action_type=%s."
                       % (self._instance.name, self._action_type))
            nfvi.nfvi_guest_services_notify(self._instance.uuid,
                                            self._instance.host_name,
                                            self._action_type,
                                            False, self._callback())
            if self._instance.action_fsm is not None:
                action_data = self._instance.action_fsm_data
                if action_data is not None:
                    action_data.set_action_post_notify()
            handled = True
        elif INSTANCE_EVENT.GUEST_ACTION_PROCEED == event:
            DLOG.debug("Guest-Services-Post-Notify for %s, notify=proceed."
                       % self._instance.name)
            self.task.task_work_complete(
                state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
            handled = True
        elif INSTANCE_EVENT.AUDIT == event:
            if self._instance.is_locked():
                DLOG.verbose("Guest-Services-Post-Notify for %s, skipping "
                             "instance is locked." % self._instance.name)
                self.task.task_work_complete(
                    state_machine.STATE_TASK_WORK_RESULT.SUCCESS, empty_reason)
                handled = True
        return handled
| 37.639932
| 82
| 0.572981
| 9,168
| 88,228
| 5.199062
| 0.026723
| 0.085346
| 0.058492
| 0.072254
| 0.898164
| 0.874436
| 0.865562
| 0.85589
| 0.848358
| 0.842085
| 0
| 0.001406
| 0.346885
| 88,228
| 2,343
| 83
| 37.655997
| 0.825781
| 0.055833
| 0
| 0.815231
| 0
| 0
| 0.090194
| 0.024851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076155
| false
| 0.073658
| 0.006242
| 0
| 0.164794
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6f4bde26d623a8f9936db071c36bdc14c9d7cd0a
| 125
|
py
|
Python
|
src/spn/gpu/__init__.py
|
kripa-experiments/SPFlow
|
32eada604bf5442d8aa10223581b187f7a57d540
|
[
"Apache-2.0"
] | 1
|
2021-09-01T16:26:20.000Z
|
2021-09-01T16:26:20.000Z
|
src/spn/gpu/__init__.py
|
kripa-experiments/SPFlow
|
32eada604bf5442d8aa10223581b187f7a57d540
|
[
"Apache-2.0"
] | null | null | null |
src/spn/gpu/__init__.py
|
kripa-experiments/SPFlow
|
32eada604bf5442d8aa10223581b187f7a57d540
|
[
"Apache-2.0"
] | null | null | null |
from spn.structure.leaves.parametric.Tensorflow import add_parametric_tensorflow_support
add_parametric_tensorflow_support()
| 41.666667
| 88
| 0.912
| 15
| 125
| 7.2
| 0.6
| 0.555556
| 0.425926
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 125
| 3
| 89
| 41.666667
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6f4f354f83c45f9520ffa41eb30a5e19b522f0e7
| 168
|
py
|
Python
|
ocnn/octree/python/ocnn/dataset/__init__.py
|
FrozenSilent/O-CNN
|
9527cd7670856229dfc3281bc05d2077a0553ec3
|
[
"MIT"
] | 6
|
2021-05-18T12:41:31.000Z
|
2021-05-24T10:02:47.000Z
|
ocnn/octree/python/ocnn/dataset/__init__.py
|
FrozenSilent/O-CNN
|
9527cd7670856229dfc3281bc05d2077a0553ec3
|
[
"MIT"
] | null | null | null |
ocnn/octree/python/ocnn/dataset/__init__.py
|
FrozenSilent/O-CNN
|
9527cd7670856229dfc3281bc05d2077a0553ec3
|
[
"MIT"
] | null | null | null |
from ocnn.dataset.dataset import Dataset
from ocnn.dataset.dataset_structure import FolderMappedStructure
from ocnn.dataset.dataset_structure import CsvMappedStructure
| 42
| 64
| 0.892857
| 20
| 168
| 7.4
| 0.35
| 0.162162
| 0.304054
| 0.445946
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 168
| 3
| 65
| 56
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f558f61550d2ae24912fe72c28e60a13bb87f81
| 66,069
|
py
|
Python
|
data_processing/neural_data_shaping.py
|
MaryamHoss/BESD
|
294e9b417cc5866e76be6faad2357ba8d26e61a9
|
[
"Apache-2.0"
] | null | null | null |
data_processing/neural_data_shaping.py
|
MaryamHoss/BESD
|
294e9b417cc5866e76be6faad2357ba8d26e61a9
|
[
"Apache-2.0"
] | null | null | null |
data_processing/neural_data_shaping.py
|
MaryamHoss/BESD
|
294e9b417cc5866e76be6faad2357ba8d26e61a9
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 17:39:05 2019
@author: hoss3301
"""
import numpy as np
import scipy.io as spio
import h5py
def LagGen(x, b, a):
l, m, p = x.shape
length = np.ceil(l / b).astype(int)
x0 = np.pad(x, ((0, a), (0, 0), (0, 0)), 'edge')
o = np.zeros((length, a, m, p))
for i in range(length):
for j in range(m):
for c in range(p):
o[i, :, j, c] = x0[i * b:i * b + a, j, c]
return o
## to shape the data like mesgarani
arrays = {}
f = h5py.File('C:/Users/hoss3301/work/deep_guinea_ears/data/data/data_rawData/in samples/Matlab Data/all.mat')
for k, v in f.items():
arrays[k] = np.array(v)
spikes = arrays['all'] # all has a shape of (11,23928,20,265)
spikes_mean = np.zeros((spikes.shape[0], spikes.shape[1], spikes.shape[3])) # take a mean on the trials
spikes_mean = np.mean(spikes, 2)
spikes_resampled = spikes_mean[:, ::8, :] # downsample to 3 khz
b = 30 # 10msec stride
a = 900 # 300msec window
activity_transpose = np.transpose(spikes_resampled,
(1, 2, 0)) # transpose so the first axis is the time and second is the num of neurons
activity_windowed = LagGen(activity_transpose, b, a)
windowed_train = np.zeros((100, 900, 265, 8))
windowed_test = np.zeros((100, 900, 265, 3))
windowed_test[:, :, :, 0:1] = activity_windowed[:, :, :, 0:1]
windowed_train[:, :, :, 0:5] = activity_windowed[:, :, :, 1:6]
windowed_test[:, :, :, 1:3] = activity_windowed[:, :, :, 6:8]
windowed_train[:, :, :, 5:8] = activity_windowed[:, :, :, 8:11]
windowed_train_concat = np.zeros((800, 900, 265))
windowed_test_concat = np.zeros((300, 900, 265))
for i in range(8):
windowed_train_concat[i * 100:(i + 1) * 100, :, :] = windowed_train[:, :, :, i]
for i in range(3):
windowed_test_concat[i * 100:(i + 1) * 100, :, :] = windowed_test[:, :, :, i]
windowed_train_concat = np.expand_dims(windowed_train_concat, axis=3)
windowed_test_concat = np.expand_dims(windowed_test_concat, axis=3)
np.save('data/neural_data/rawData/windowed_train_concat.npy', windowed_train_concat)
np.save('data/neural_data/rawData/windowed_test_concat.npy', windowed_test_concat)
np.save('data/neural_data/rawData/windowed_train.npy', windowed_train)
np.save('data/neural_data/rawData/windowed_test.npy', windowed_test)
####my data
"""
Created on Thu Nov 21 17:39:05 2019
@author: hoss3301
"""
import numpy as np
import scipy.io as spio
import h5py
def LagGen(x, b, a):
l, m, p = x.shape
length = np.ceil(l / b).astype(int)
x0 = np.pad(x, ((0, a), (0, 0), (0, 0)), 'edge')
o = np.zeros((length, a, m, p))
for i in range(length):
for j in range(m):
for c in range(p):
o[i, :, j, c] = x0[i * b:i * b + a, j, c]
return o
# D:\data\ICC\spikes\RawData
arrays = {}
f = h5py.File('C:/Users/hoss3301/work/deep_guinea_ears/data/data_mine/spikes_noisy_clean.mat', 'r')
for k, v in f.items():
arrays[k] = np.array(v)
spikes = arrays['spikes'] # spikes has a shape of (11,23928,20,265)
spikes_mean = np.zeros((spikes.shape[0], spikes.shape[1], spikes.shape[3])) # take a mean on the trials
spikes_mean = np.mean(spikes, 2)
spikes_resampled = spikes_mean[:, ::8, :] # downsample to 3 khz
b = 30 # 10msec stride
a = 900 # 300msec window
activity_transpose = np.transpose(spikes_resampled,
(1, 2, 0)) # transpose so the first axis is the time and second is the num of neurons
activity_windowed = LagGen(activity_transpose, b, a)
windowed_train = np.zeros((100, 900, 265, 8))
windowed_test = np.zeros((100, 900, 265, 3))
windowed_test[:, :, :, 0:1] = activity_windowed[:, :, :, 0:1]
windowed_train[:, :, :, 0:5] = activity_windowed[:, :, :, 1:6]
windowed_test[:, :, :, 1:3] = activity_windowed[:, :, :, 6:8]
windowed_train[:, :, :, 5:8] = activity_windowed[:, :, :, 8:11]
windowed_train_concat = np.zeros((800, 900, 265))
windowed_test_concat = np.zeros((300, 900, 265))
for i in range(8):
windowed_train_concat[i * 100:(i + 1) * 100, :, :] = windowed_train[:, :, :, i]
for i in range(3):
windowed_test_concat[i * 100:(i + 1) * 100, :, :] = windowed_test[:, :, :, i]
windowed_train_concat = np.expand_dims(windowed_train_concat, axis=3)
windowed_test_concat = np.expand_dims(windowed_test_concat, axis=3)
np.save('data/neural_data/rawData/windowed_train_concat.npy', windowed_train_concat)
np.save('data/neural_data/rawData/windowed_test_concat.npy', windowed_test_concat)
np.save('data/neural_data/rawData/windowed_train.npy', windowed_train)
np.save('data/neural_data/rawData/windowed_test.npy', windowed_test)
def LagGen_2d(x, stride, win):
length, width = x.shape
n_win = np.ceil(width / stride).astype(int)
x0 = np.pad(x, ((0, 0), (0, win - 1)), 'edge')
o = np.zeros((length, n_win, win))
for i in range(length):
for j in range(n_win):
o[i, j, :] = x0[i, j * stride:j * stride + win]
return o
str_stim = round(0.24 * 97656.25) # 10msec stride
win_stim = round(0.3 * 97656.25) # 300msec window
str_spk = round(0.2 * 24414.4)
win_spk = round(0.3 * 24414.4)
n_win = np.ceil(input_train.shape[1] / str_stim).astype(int)
input_test_windowed = np.zeros((input_test.shape[0], n_win, win_stim))
input_test_windowed = LagGen_2d(input_test, str_stim, win_stim)
input_test_windowed = LagGen_2d(input_test, str_stim, win_stim)
max_input_test_windowed = np.zeros((1, 79500))
for i in range(79500):
max_input_test_windowed[:, i] = np.max(np.abs(input_test_windowed[i, :]))
input_test_windowed_1 = np.zeros((15900 * 5, 29297))
input_test_windowed_1 = np.reshape(input_test_windowed, (15900 * 5, 29297))
for i in rane(79500):
if max_input_test_windowed[:, i] == 0:
max_input_test_windowed[:, i] = 0.0000001
######### 5-2-2020 this is how i created the old data, wrong normalization
import numpy as np
input_train = np.load('E:/resnet stuff/input_train.npy')
win = int(input_train.shape[1] / 3)
input_train_windowed = np.zeros((42400, 3, win))
for i in range(42400):
for j in range(3):
input_train_windowed[i, j, :] = input_train[i, j * win:j * win + win]
input_train_windowed_reshaped = np.zeros((42400 * 3, win))
input_train_windowed_reshaped = np.reshape(input_train_windowed, (42400 * 3, win))
max_input_train_windowed = np.zeros((1, 127200))
for i in range(127200):
max_input_train_windowed[:, i] = np.max(np.abs(input_train_windowed_reshaped[i, :]))
if max_input_train_windowed[:, i] == 0:
max_input_train_windowed[:, i] = 0.0000001
input_train_windowed_reshaped_normalized = np.zeros(shape=(42400 * 3, win))
for i in range(127200):
input_train_windowed_reshaped_normalized[i, :] = (input_train_windowed_reshaped[i, :]) / (
max_input_train_windowed[:, i])
np.save('E:/resnet stuff/max_input_train_windowed', max_input_train_windowed)
import h5py
f = h5py.File('E:/resnet stuff/input_train_windowed_normalized.h5', 'w')
f.create_dataset('input_train', data=input_train_windowed_reshaped_normalized)
f.close()
spikes_train = np.load('E:/resnet stuff/spikes_train.npy')
win = int(spikes_train.shape[1] / 3)
spikes_train_windowed = np.zeros((42400, 3, win))
for i in range(42400):
for j in range(3):
spikes_train_windowed[i, j, :] = spikes_train[i, j * win:j * win + win]
spikes_train_windowed_reshaped = np.zeros((42400 * 3, win))
spikes_train_windowed_reshaped = np.reshape(spikes_train_windowed, (42400 * 3, win))
max_spikes_train_windowed = np.zeros((1, 127200))
for i in range(127200):
max_spikes_train_windowed[:, i] = np.max(np.abs(spikes_train_windowed_reshaped[i, :]))
if max_spikes_train_windowed[:, i] == 0:
max_spikes_train_windowed[:, i] = 0.0000001
spikes_train_windowed_reshaped_normalized = np.zeros(shape=(42400 * 3, win))
for i in range(127200):
spikes_train_windowed_reshaped_normalized[i, :] = (spikes_train_windowed_reshaped[i, :]) / (
max_spikes_train_windowed[:, i])
np.save('E:/resnet stuff/max_spikes_train_windowed', max_spikes_train_windowed)
import h5py
f = h5py.File('E:/resnet stuff/spikes_train_windowed_reshaped_normalized.h5', 'w')
f.create_dataset('spikes_train', data=spikes_train_windowed_reshaped_normalized)
f.close()
import numpy as np
input_test = np.load('E:/resnet stuff/input_test.npy')
win = int(input_test.shape[1] / 3)
input_test_windowed = np.zeros((15900, 3, win))
for i in range(15900):
for j in range(3):
input_test_windowed[i, j, :] = input_test[i, j * win:j * win + win]
input_test_windowed_reshaped = np.zeros((15900 * 3, win))
input_test_windowed_reshaped = np.reshape(input_test_windowed, (15900 * 3, win))
max_input_test_windowed = np.zeros((1, 47700))
for i in range(47700):
max_input_test_windowed[:, i] = np.max(np.abs(input_test_windowed_reshaped[i, :]))
if max_input_test_windowed[:, i] == 0:
max_input_test_windowed[:, i] = 0.0000001
input_test_windowed_reshaped_normalized = np.zeros(shape=(15900 * 3, win))
for i in range(47700):
input_test_windowed_reshaped_normalized[i, :] = (input_test_windowed_reshaped[i, :]) / (
max_input_test_windowed[:, i])
np.save('E:/resnet stuff/max_input_test_windowed', max_input_test_windowed)
import h5py
f = h5py.File('E:/resnet stuff/input_test_windowed_reshaped_normalized.h5', 'w')
f.create_dataset('input_test', data=input_test_windowed_reshaped_normalized)
f.close()
import numpy as np
spikes_test = np.load('E:/resnet stuff/spikes_test.npy')
win = int(spikes_test.shape[1] / 3)
spikes_test_windowed = np.zeros((15900, 3, win))
for i in range(15900):
for j in range(3):
spikes_test_windowed[i, j, :] = spikes_test[i, j * win:j * win + win]
spikes_test_windowed_reshaped = np.zeros((15900 * 3, win))
spikes_test_windowed_reshaped = np.reshape(spikes_test_windowed, (15900 * 3, win))
max_spikes_test_windowed = np.zeros((1, 47700))
for i in range(47700):
max_spikes_test_windowed[:, i] = np.max(np.abs(spikes_test_windowed_reshaped[i, :]))
if max_spikes_test_windowed[:, i] == 0:
max_spikes_test_windowed[:, i] = 0.0000001
spikes_test_windowed_reshaped_normalized = np.zeros(shape=(15900 * 3, win))
for i in range(47700):
spikes_test_windowed_reshaped_normalized[i, :] = (spikes_test_windowed_reshaped[i, :]) / (
max_spikes_test_windowed[:, i])
np.save('E:/resnet stuff/max_spikes_test_windowed', max_spikes_test_windowed)
import h5py
f = h5py.File('E:/resnet stuff/spikes_test_windowed_reshaped_normalized.h5', 'w')
f.create_dataset('spikes_test', data=spikes_test_windowed_reshaped_normalized)
f.close()
###########################################################
# noisy model wrong normalization
##########################################################
import numpy as np
import scipy.io as spio
import h5py
arrays = {}
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_scream.mat')
for k, v in f.items():
arrays[k] = np.array(v)
spikes_scream = arrays['Raw_all_noisy_scream'] # size=35160,40,256,12
spikes_scream_mean = np.mean(spikes_scream, 1)
spikes_scream_truncated = spikes_scream_mean[0:23928, :, :]
spikes_scream_truncated = np.transpose(spikes_scream_truncated, (1, 2, 0))
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_tooth.mat')
for k, v in f.items():
arrays[k] = np.array(v)
spikes_tooth = arrays['Raw_all_noisy_tooth'] # size=35160,40,256,12
spikes_tooth_mean = np.mean(spikes_tooth, 1)
spikes_tooth_truncated = spikes_tooth_mean[0:23928, :, :]
spikes_tooth_truncated = np.transpose(spikes_tooth_truncated, (1, 2, 0))
spikes_concat = np.concatenate((spikes_scream_truncated, spikes_tooth_truncated), axis=1)
spikes_noisy_train = np.zeros(shape=(256 * 24, 23928))
spikes_noisy_train = np.reshape(spikes_concat, (256 * 24, 23928))
max_spikes_noisy_train = np.zeros((1, 6144))
for i in range(6144):
max_spikes_noisy_train[:, i] = np.max(np.abs(spikes_noisy_train[i, :]))
if max_spikes_noisy_train[:, i] == 0:
max_spikes_noisy_train[:, i] = 0.0000001
spikes_noisy_train_normalized = np.zeros(shape=(6144, 23928))
for i in range(6144):
spikes_noisy_train_normalized[i, :] = (spikes_noisy_train[i, :]) / (max_spikes_noisy_train[:, i])
win = int(spikes_noisy_train_normalized.shape[1] / 3)
spikes_noisy_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
for j in range(3):
spikes_noisy_train_windowed[i, j, :] = spikes_noisy_train_normalized[i, j * win:j * win + win]
spikes_noisy_train_windowed_reshaped = np.zeros((6144 * 3, win))
spikes_noisy_train_windowed_reshaped = np.reshape(spikes_noisy_train_windowed, (6144 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/spikes_noisy_train_windowed_reshaped.h5', 'w')
f.create_dataset('spikes_train', data=spikes_noisy_train_windowed_reshaped)
f.close()
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_squeal.mat')
arrays = {}
for k, v in f.items():
arrays[k] = np.array(v)
spikes_squeal = arrays['Raw_all_noisy_squeal'] # size=35160,40,256,12
spikes_squeal_mean = np.mean(spikes_squeal, 1)
spikes_squeal_truncated = spikes_squeal_mean[0:23928, :, :]
spikes_squeal_truncated = np.transpose(spikes_squeal_truncated, (1, 2, 0))
spikes_noisy_test = np.zeros(shape=(256 * 12, 23928))
spikes_noisy_test = np.reshape(spikes_squeal_truncated, (256 * 12, 23928))
max_spikes_noisy_test = np.zeros((1, 3072))
for i in range(3072):
max_spikes_noisy_test[:, i] = np.max(np.abs(spikes_noisy_test[i, :]))
if max_spikes_noisy_test[:, i] == 0:
max_spikes_noisy_test[:, i] = 0.0000001
spikes_noisy_test_normalized = np.zeros(shape=(3072, 23928))
for i in range(3072):
spikes_noisy_test_normalized[i, :] = (spikes_noisy_test[i, :]) / (max_spikes_noisy_test[:, i])
win = int(spikes_noisy_test_normalized.shape[1] / 3)
spikes_noisy_test_windowed = np.zeros((3072, 3, win))
for i in range(3072):
for j in range(3):
spikes_noisy_test_windowed[i, j, :] = spikes_noisy_test_normalized[i, j * win:j * win + win]
spikes_noisy_test_windowed_reshaped = np.zeros((3072 * 3, win))
spikes_noisy_test_windowed_reshaped = np.reshape(spikes_noisy_test_windowed, (3072 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/spikes_noisy_test_windowed_reshaped.h5', 'w')
f.create_dataset('spikes_test', data=spikes_noisy_test_windowed_reshaped)
f.close()
import scipy.io as sio
path = 'D:/data/ICC/stim_orig.mat'
mat = sio.loadmat(path)
input_all = mat['original_stim']
input_scream_noisy = input_all[:, 0:12]
input_squeal_noisy = input_all[:, 24:36]
input_tooth_noisy = input_all[:, 48:60]
input_train_noisy = np.concatenate((input_scream_noisy, input_tooth_noisy), axis=1)
input_scream_clean = input_all[:, 72:74]
input_squeal_clean = input_all[:, 74:76]
input_tooth_clean = input_all[:, 76:78]
input_train_noisy = input_train_noisy[:, :, np.newaxis]
list_train = [input_train_noisy] * 256
train_input_noisy = np.concatenate(list_train, axis=2)
train_input_noisy = np.transpose(train_input_noisy, (2, 1, 0))
train_input_noisy_short = train_input_noisy[:, :, 195312:195312 + 191406]
train_input_noisy_short_resampled = train_input_noisy_short[:, :, ::2]
train_input_noisy_short_resampled_reshaped = np.zeros(shape=(256 * 24, 95703))
train_input_noisy_short_resampled_reshaped = np.reshape(train_input_noisy_short_resampled, (256 * 24, 95703))
max_input_noisy_train = np.zeros((1, 6144))
for i in range(6144):
max_input_noisy_train[:, i] = np.max(np.abs(train_input_noisy_short_resampled_reshaped[i, :]))
if max_input_noisy_train[:, i] == 0:
max_input_noisy_train[:, i] = 0.0000001
input_noisy_train_normalized = np.zeros(shape=(6144, 95703))
for i in range(6144):
input_noisy_train_normalized[i, :] = (train_input_noisy_short_resampled_reshaped[i, :]) / (
max_input_noisy_train[:, i])
win = int(95703 / 3)
input_noisy_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
for j in range(3):
input_noisy_train_windowed[i, j, :] = input_noisy_train_normalized[i, j * win:j * win + win]
input_noisy_train_windowed_reshaped = np.zeros((6144 * 3, win))
input_noisy_train_windowed_reshaped = np.reshape(input_noisy_train_windowed, (6144 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/input_noisy_train.h5', 'w')
f.create_dataset('input_train', data=input_noisy_train_windowed_reshaped)
f.close()
input_test_noisy = input_squeal_noisy
input_test_noisy = input_test_noisy[:, :, np.newaxis]
list_test = [input_test_noisy] * 256
test_input_noisy = np.concatenate(list_test, axis=2)
test_input_noisy = np.transpose(test_input_noisy, (2, 1, 0))
test_input_noisy_short = test_input_noisy[:, :, 195312:195312 + 191406]
test_input_noisy_short_resampled = test_input_noisy_short[:, :, ::2]
test_input_noisy_short_resampled_reshaped = np.zeros(shape=(256 * 12, 95703))
test_input_noisy_short_resampled_reshaped = np.reshape(test_input_noisy_short_resampled, (256 * 12, 95703))
max_input_noisy_test = np.zeros((1, 3072))
for i in range(3072):
max_input_noisy_test[:, i] = np.max(np.abs(test_input_noisy_short_resampled_reshaped[i, :]))
if max_input_noisy_test[:, i] == 0:
max_input_noisy_test[:, i] = 0.0000001
input_noisy_test_normalized = np.zeros(shape=(3072, 95703))
for i in range(3072):
input_noisy_test_normalized[i, :] = (test_input_noisy_short_resampled_reshaped[i, :]) / (max_input_noisy_test[:, i])
win = int(95703 / 3)
input_noisy_test_windowed = np.zeros((3072, 3, win))
for i in range(3072):
for j in range(3):
input_noisy_test_windowed[i, j, :] = input_noisy_test_normalized[i, j * win:j * win + win]
input_noisy_test_windowed_reshaped = np.zeros((3072 * 3, win))
input_noisy_test_windowed_reshaped = np.reshape(input_noisy_test_windowed, (3072 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/input_noisy_test_windowed_reshaped.h5', 'w')
f.create_dataset('input_test', data=input_noisy_test_windowed_reshaped)
f.close()
input_squeal_clean_55 = input_squeal_clean[195312:195312 + 191406, 0:1]
input_squeal_clean_65 = input_squeal_clean[195312:195312 + 191406, 1:2]
list_squeal_clean_55 = [input_squeal_clean_55] * 2
input_squeal_clean_55_2 = np.concatenate(list_squeal_clean_55, axis=1)
list_squeal_clean_65 = [input_squeal_clean_65] * 2
input_squeal_clean_65_2 = np.concatenate(list_squeal_clean_65, axis=1)
input_squeal_clean_all = np.concatenate((input_squeal_clean_55_2, input_squeal_clean_65_2), axis=1)
list_squeal = [input_squeal_clean_all] * 3
input_test_clean = np.concatenate(list_squeal, axis=1)
input_scream_clean_55 = input_scream_clean[195312:195312 + 191406, 0:1]
input_scream_clean_65 = input_scream_clean[195312:195312 + 191406, 1:2]
list_scream_clean_55 = [input_scream_clean_55] * 2
input_scream_clean_55_2 = np.concatenate(list_scream_clean_55, axis=1)
list_scream_clean_65 = [input_scream_clean_65] * 2
input_scream_clean_65_2 = np.concatenate(list_scream_clean_65, axis=1)
input_scream_clean_all = np.concatenate((input_scream_clean_55_2, input_scream_clean_65_2), axis=1)
list_scream = [input_scream_clean_all] * 3
input_scream_clean_all_12 = np.concatenate(list_scream, axis=1)
input_tooth_clean_55 = input_tooth_clean[195312:195312 + 191406, 0:1]
input_tooth_clean_65 = input_tooth_clean[195312:195312 + 191406, 1:2]
list_tooth_clean_55 = [input_tooth_clean_55] * 2
input_tooth_clean_55_2 = np.concatenate(list_tooth_clean_55, axis=1)
list_tooth_clean_65 = [input_tooth_clean_65] * 2
input_tooth_clean_65_2 = np.concatenate(list_tooth_clean_65, axis=1)
input_tooth_clean_all = np.concatenate((input_tooth_clean_55_2, input_tooth_clean_65_2), axis=1)
list_tooth = [input_tooth_clean_all] * 3
input_tooth_clean_all_12 = np.concatenate(list_tooth, axis=1)
input_train_clean = np.concatenate((input_scream_clean_all_12, input_tooth_clean_all_12), axis=1)
input_train_clean = input_train_clean[:, :, np.newaxis]
list_train = [input_train_clean] * 256
input_train_clean_all = np.concatenate(list_train, axis=2)
input_train_clean_all = np.transpose(input_train_clean_all, (2, 1, 0))
input_train_clean_reshaped = np.zeros(shape=(256 * 24, 191406))
input_train_clean_reshaped = np.reshape(input_train_clean_all, (256 * 24, 191406))
input_train_clean_reshaped = input_train_clean_reshaped[:, ::2]
max_input_clean_train = np.zeros((1, 6144))
for i in range(6144):
max_input_clean_train[:, i] = np.max(np.abs(input_train_clean_reshaped[i, :]))
if max_input_clean_train[:, i] == 0:
max_input_clean_train[:, i] = 0.0000001
input_clean_train_normalized = np.zeros(shape=(6144, 95703))
for i in range(6144):
input_clean_train_normalized[i, :] = (input_train_clean_reshaped[i, :]) / (max_input_clean_train[:, i])
win = int(95703 / 3)
input_clean_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
for j in range(3):
input_clean_train_windowed[i, j, :] = input_clean_train_normalized[i, j * win:j * win + win]
input_clean_train_windowed_reshaped = np.zeros((6144 * 3, win))
input_clean_train_windowed_reshaped = np.reshape(input_clean_train_windowed, (6144 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/input_clean_train.h5', 'w')
f.create_dataset('input_train', data=input_clean_train_windowed_reshaped)
f.close()
input_test_clean = input_test_clean[:, :, np.newaxis]
list_test = [input_test_clean] * 256
input_test_clean_all = np.concatenate(list_test, axis=2)
input_test_clean_all = np.transpose(input_test_clean_all, (2, 1, 0))
input_test_clean_reshaped = np.zeros(shape=(256 * 12, 191406))
input_test_clean_reshaped = np.reshape(input_test_clean_all, (256 * 12, 191406))
input_test_clean_reshaped = input_test_clean_reshaped[:, ::2]
max_input_clean_test = np.zeros((1, 3072))
for i in range(3072):
max_input_clean_test[:, i] = np.max(np.abs(input_test_clean_reshaped[i, :]))
if max_input_clean_test[:, i] == 0:
max_input_clean_test[:, i] = 0.0000001
input_clean_test_normalized = np.zeros(shape=(3072, 95703))
for i in range(3072):
input_clean_test_normalized[i, :] = (input_test_clean_reshaped[i, :]) / (max_input_clean_test[:, i])
win = int(95703 / 3)
input_clean_test_windowed = np.zeros((3072, 3, win))
for i in range(3072):
for j in range(3):
input_clean_test_windowed[i, j, :] = input_clean_test_normalized[i, j * win:j * win + win]
input_clean_test_windowed_reshaped = np.zeros((3072 * 3, win))
input_clean_test_windowed_reshaped = np.reshape(input_clean_test_windowed, (3072 * 3, win))
f = h5py.File('D:/data/ICC/spikes/RawData/input_clean_test.h5', 'w')
f.create_dataset('input_test', data=input_clean_test_windowed_reshaped)
f.close()
########################################################################
#### to divide all by maximum of the whole matrix
#########################################################################
# thilos data
import scipy.io as spio
import numpy as np
mat = spio.loadmat('D:/data_workFolder/deep_guinea_ears/data/data_rawData/in samples/Matlab Data/input.mat')
input_train = mat['input_train'] # shape: (95703,8)
input_test = mat['input_test'] # shape: (95703,3)
input_test = input_test[:, :, np.newaxis, np.newaxis] # shape: 95703,3,1,1,
list_test = [input_test] * 20
input_test = np.concatenate(list_test, axis=2) # shape: 95703,3,20,1,
list_test = [input_test] * 265
input_test = np.concatenate(list_test, axis=3) ##shape: 95703,3,20,265
input_train = input_train[:, :, np.newaxis, np.newaxis]
list_train = [input_train] * 20
input_train = np.concatenate(list_train, axis=2)
list_train = [input_train] * 265
input_train = np.concatenate(list_train, axis=3)
input_test = np.transpose(input_test, (2, 3, 1, 0)) # shape: 20,265,3,95703
input_train = np.transpose(input_train, (2, 3, 1, 0))
input_test_reshaped = np.zeros(shape=(20 * 265 * 3, 95703))
input_test_reshaped = np.reshape(input_test, (20 * 265 * 3, 95703)) # shape: 15900,95703
input_train_reshaped = np.zeros(shape=(20 * 265 * 8, 95703))
input_train_reshaped = np.reshape(input_train, (20 * 265 * 8, 95703))
max_input_test = np.max(np.abs(input_test_reshaped))
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_test',
max_input_test)
input_test_normalized = np.zeros(shape=(15900, 95703))
input_test_normalized = input_test_reshaped / max_input_test
max_input_train = np.max(np.abs(input_train_reshaped))
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_train',
max_input_train)
input_train_normalized = np.zeros(shape=(42400, 95703))
input_train_normalized = input_train_reshaped / max_input_train
input_test_cut = np.zeros(shape=(input_test_normalized.shape[0], 3, int(input_test_normalized.shape[1] / 3)))
win = int(input_test_normalized.shape[1] / 3)
for i in range(15900):
for j in range(3):
input_test_cut[i, j, :] = input_test_normalized[i, j * win:j * win + win] # shape: 15900,3,31901
input_train_cut = np.zeros(shape=(input_train_normalized.shape[0], 3, int(input_train_normalized.shape[1] / 3)))
win = int(input_train_normalized.shape[1] / 3)
for i in range(input_train_normalized.shape[0]):
for j in range(3):
input_train_cut[i, j, :] = input_train_normalized[i, j * win:j * win + win]
input_test = np.zeros(shape=(15900 * 3, 31901)) # shape: 47700,31901
input_test = np.reshape(input_test_cut, (15900 * 3, 31901))
f = h5py.File('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/input_test.h5',
'w')
f.create_dataset('input_test', data=input_test)
f.close()
input_train = np.zeros(shape=(input_train_cut.shape[0] * 3, 31901))
input_train = np.reshape(input_train_cut, (input_train_cut.shape[0] * 3, 31901))
f = h5py.File('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/input_train.h5',
'w')
f.create_dataset('input_train', data=input_train)
f.close()
arrays = {}
f = h5py.File('D:/data_workFolder/deep_guinea_ears/data/data_rawData/in samples/Matlab Data/all.mat', 'r')
for k, v in f.items():
arrays[k] = np.array(v)
spikes = arrays['all'] # all has a shape of (11,23928,20,265)
# spikes=np.transpose(spikes,(1,2,3,0))
spikes_test = np.zeros(shape=(3, 23928, 20, 265)) # shape of (3,23928,20,265)
spikes_train = np.zeros(shape=(8, 23928, 20, 265))
spikes_test[0:1, :, :, :] = spikes[0:1, :, :, :]
spikes_train[0:5, :, :, :] = spikes[1:6, :, :, :]
spikes_test[1:3, :, :, :] = spikes[6:8, :, :, :]
spikes_train[5:8, :, :, :] = spikes[8:11, :, :, :]
spikes_test = np.transpose(spikes_test, (2, 3, 0, 1)) # shape: 20,265,3,23928
spikes_train = np.transpose(spikes_train, (2, 3, 0, 1))
spikes_test_reshaped = np.zeros(shape=(20 * 265 * 3, 23928)) # shape: 15900,23928
spikes_test_reshaped = np.reshape(spikes_test, (15900, 23928))
spikes_train_reshaped = np.zeros(shape=(20 * 265 * 8, 23928)) # shape: 42400,23928
spikes_train_reshaped = np.reshape(spikes_train, (20 * 265 * 8, 23928))
max_spikes_test = np.max(np.abs(spikes_test_reshaped))
spikes_test_normalized = spikes_test_reshaped / max_spikes_test
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_spikes_test',
max_spikes_test)
spikes_test_cut = np.zeros(shape=(spikes_test_normalized.shape[0], 3, int(spikes_test_normalized.shape[1] / 3)))
win = int(spikes_test_normalized.shape[1] / 3)
for i in range(15900):
for j in range(3):
spikes_test_cut[i, j, :] = spikes_test_normalized[i, j * win:j * win + win] # shape: 15900,3,31901
spikes_test = np.zeros(shape=(15900 * 3, 7976))
spikes_test = np.reshape(spikes_test_cut, (15900 * 3, 7976))
f = h5py.File('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/spikes_test.h5',
'w')
f.create_dataset('spikes_test', data=spikes_test)
f.close()
max_spikes_train = np.max(np.abs(spikes_train_reshaped))
spikes_train_normalized = spikes_train_reshaped / max_spikes_train
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_spikes_train',
max_spikes_train)
spikes_train_cut = np.zeros(shape=(spikes_train_normalized.shape[0], 3, int(spikes_train_normalized.shape[1] / 3)))
win = int(spikes_train_normalized.shape[1] / 3)
for i in range(42400):
for j in range(3):
spikes_train_cut[i, j, :] = spikes_train_normalized[i, j * win:j * win + win] # shape: 15900,3,31901
spikes_train = np.zeros(shape=(42400 * 3, 7976))
spikes_train = np.reshape(spikes_train_cut, (42400 * 3, 7976))
f = h5py.File(
'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/spikes_train.h5', 'w')
f.create_dataset('spikes_train', data=spikes_train)
f.close()
# my data
import numpy as np
import scipy.io as spio
import h5py
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_squeal.mat')
arrays = {}
for k, v in f.items():
arrays[k] = np.array(v)
spikes_squeal = arrays['Raw_all_noisy_squeal'] # size=35160,40,256,12
spikes_squeal_mean = np.mean(spikes_squeal, 1) # 35160,256,12
spikes_squeal_truncated = spikes_squeal_mean[0:23928, :, :] # 23928,256,12
spikes_squeal_truncated = np.transpose(spikes_squeal_truncated, (1, 2, 0)) # 256,12,23928
spikes_noisy_test = np.zeros(shape=(256 * 12, 23928))
spikes_noisy_test = np.reshape(spikes_squeal_truncated, (256 * 12, 23928)) # 3072,23928
max_spikes_noisy_test = np.max(np.abs(spikes_noisy_test))
spikes_noisy_test_normalized = np.zeros(shape=(3072, 23928))
spikes_noisy_test_normalized = spikes_noisy_test / max_spikes_noisy_test # 3072,23928
win = int(spikes_noisy_test_normalized.shape[1] / 3)
spikes_noisy_test_windowed = np.zeros((3072, 3, win)) # 3072,3,7976
for i in range(3072):
for j in range(3):
spikes_noisy_test_windowed[i, j, :] = spikes_noisy_test_normalized[i, j * win:j * win + win]
spikes_noisy_test_windowed_reshaped = np.zeros((3072 * 3, win)) # 3072*3,7976
spikes_noisy_test_windowed_reshaped = np.reshape(spikes_noisy_test_windowed, (3072 * 3, win))
f = h5py.File(
'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/spikes_noisy_test.h5',
'w')
f.create_dataset('spikes_test', data=spikes_noisy_test_windowed_reshaped)
f.close()
np.save(
'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_spikes_noisy_test',
max_spikes_noisy_test)
arrays = {}
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_scream.mat')
for k, v in f.items():
arrays[k] = np.array(v)
spikes_scream = arrays['Raw_all_noisy_scream'] # size=35160,40,256,12
spikes_scream_mean = np.mean(spikes_scream, 1) # size=35160,256,12
spikes_scream_truncated = spikes_scream_mean[0:23928, :, :] # size=23928,256,12
spikes_scream_truncated = np.transpose(spikes_scream_truncated, (1, 2, 0))
arrays = {}
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_tooth.mat')
for k, v in f.items():
arrays[k] = np.array(v)
spikes_tooth = arrays['Raw_all_noisy_tooth'] # size=35160,40,256,12
spikes_tooth_mean = np.mean(spikes_tooth, 1) # size=35160,256,12
spikes_tooth_truncated = spikes_tooth_mean[0:23928, :, :] # size=23928,256,12
spikes_tooth_truncated = np.transpose(spikes_tooth_truncated, (1, 2, 0)) # size=256,12,23928
spikes_concat = np.concatenate((spikes_scream_truncated, spikes_tooth_truncated), axis=1) # size=256,24,23928
spikes_noisy_train = np.zeros(shape=(256 * 24, 23928)) # size=256*24,23928
spikes_noisy_train = np.reshape(spikes_concat, (256 * 24, 23928))
max_spikes_noisy_train = np.max(np.abs(spikes_noisy_train))
np.save(
'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_spikes_noisy_train',
max_spikes_noisy_train)
spikes_noisy_train_normalized = np.zeros(shape=(6144, 23928))
spikes_noisy_train_normalized = spikes_noisy_train / max_spikes_noisy_train
win = int(spikes_noisy_train_normalized.shape[1] / 3)
spikes_noisy_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
for j in range(3):
spikes_noisy_train_windowed[i, j, :] = spikes_noisy_train_normalized[i, j * win:j * win + win]
spikes_noisy_train_windowed_reshaped = np.zeros((6144 * 3, win))
win = 7976
spikes_noisy_train_windowed_reshaped = np.reshape(spikes_noisy_train_windowed, (6144 * 3, win))
f = h5py.File(
'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/spikes_noisy_train.h5',
'w')
f.create_dataset('spikes_train', data=spikes_noisy_train_windowed_reshaped)
f.close()
# --- Time-domain noisy stimuli: training (scream+tooth) and test (squeal) ---
import scipy.io as sio
path = 'D:/data/ICC/stim_orig.mat'
mat = sio.loadmat(path)
input_all = mat['original_stim']
# Column layout of original_stim (presumably: 12 noisy variants per call type,
# then 2 clean levels per call type — TODO confirm against the .mat producer).
input_scream_noisy = input_all[:, 0:12]
input_squeal_noisy = input_all[:, 24:36]
input_tooth_noisy = input_all[:, 48:60]
input_train_noisy = np.concatenate((input_scream_noisy, input_tooth_noisy), axis=1)
input_scream_clean = input_all[:, 72:74]
input_squeal_clean = input_all[:, 74:76]
input_tooth_clean = input_all[:, 76:78]
# Tile the same stimulus once per electrode (256 copies along a new axis).
input_train_noisy = input_train_noisy[:, :, np.newaxis]
list_train = [input_train_noisy] * 256
train_input_noisy = np.concatenate(list_train, axis=2)  # 407070,24,256
train_input_noisy = np.transpose(train_input_noisy, (2, 1, 0))  # 256,24,407070
# Keep a 191406-sample segment, then decimate by 2 (no anti-alias filter).
train_input_noisy_short = train_input_noisy[:, :, 195312:195312 + 191406]  # 256,24,191406
train_input_noisy_short_resampled = train_input_noisy_short[:, :, ::2]  # 256,24,95703
train_input_noisy_short_resampled_reshaped = np.zeros(shape=(256 * 24, 95703))
train_input_noisy_short_resampled_reshaped = np.reshape(train_input_noisy_short_resampled, (256 * 24, 95703))
max_input_noisy_train = np.max(np.abs(train_input_noisy_short_resampled_reshaped))
np.save(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_noisy_train',
    max_input_noisy_train)
input_noisy_train_normalized = np.zeros(shape=(6144, 95703))
input_noisy_train_normalized = train_input_noisy_short_resampled_reshaped / max_input_noisy_train
# Cut each row into 3 non-overlapping windows of 31901 samples.
win = int(95703 / 3)
input_noisy_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
    for j in range(3):
        input_noisy_train_windowed[i, j, :] = input_noisy_train_normalized[i, j * win:j * win + win]
input_noisy_train_windowed_reshaped = np.zeros((6144 * 3, win))
input_noisy_train_windowed_reshaped = np.reshape(input_noisy_train_windowed, (6144 * 3, win))
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/input_noisy_train.h5',
    'w')
f.create_dataset('input_train', data=input_noisy_train_windowed_reshaped)
f.close()
# Same pipeline for the test stimulus (squeal, 12 noisy variants).
input_test_noisy = input_squeal_noisy
input_test_noisy = input_test_noisy[:, :, np.newaxis]
list_test = [input_test_noisy] * 256
test_input_noisy = np.concatenate(list_test, axis=2)
test_input_noisy = np.transpose(test_input_noisy, (2, 1, 0))
test_input_noisy_short = test_input_noisy[:, :, 195312:195312 + 191406]
test_input_noisy_short_resampled = test_input_noisy_short[:, :, ::2]
test_input_noisy_short_resampled_reshaped = np.zeros(shape=(256 * 12, 95703))
test_input_noisy_short_resampled_reshaped = np.reshape(test_input_noisy_short_resampled, (256 * 12, 95703))
max_input_noisy_test = np.max(np.abs(test_input_noisy_short_resampled_reshaped))
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_noisy_test',
        max_input_noisy_test)
input_noisy_test_normalized = np.zeros(shape=(3072, 95703))
input_noisy_test_normalized = test_input_noisy_short_resampled_reshaped / max_input_noisy_test
win = int(95703 / 3)
input_noisy_test_windowed = np.zeros((3072, 3, win))
for i in range(3072):
    for j in range(3):
        input_noisy_test_windowed[i, j, :] = input_noisy_test_normalized[i, j * win:j * win + win]
input_noisy_test_windowed_reshaped = np.zeros((3072 * 3, win))
input_noisy_test_windowed_reshaped = np.reshape(input_noisy_test_windowed, (3072 * 3, win))
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/input_noisy_test.h5', 'w')
f.create_dataset('input_test', data=input_noisy_test_windowed_reshaped)
f.close()
# --- Time-domain clean stimuli -----------------------------------------------
# Each call type has two clean levels (55 dB and 65 dB, presumably — TODO
# confirm). Each level is duplicated x2 and the pair tiled x3 to get 12
# columns, matching the 12 noisy variants per call type.
input_squeal_clean_55 = input_squeal_clean[195312:195312 + 191406, 0:1]
input_squeal_clean_65 = input_squeal_clean[195312:195312 + 191406, 1:2]
list_squeal_clean_55 = [input_squeal_clean_55] * 2
input_squeal_clean_55_2 = np.concatenate(list_squeal_clean_55, axis=1)
list_squeal_clean_65 = [input_squeal_clean_65] * 2
input_squeal_clean_65_2 = np.concatenate(list_squeal_clean_65, axis=1)
input_squeal_clean_all = np.concatenate((input_squeal_clean_55_2, input_squeal_clean_65_2), axis=1)
list_squeal = [input_squeal_clean_all] * 3
input_test_clean = np.concatenate(list_squeal, axis=1)
input_scream_clean_55 = input_scream_clean[195312:195312 + 191406, 0:1]
input_scream_clean_65 = input_scream_clean[195312:195312 + 191406, 1:2]
list_scream_clean_55 = [input_scream_clean_55] * 2
input_scream_clean_55_2 = np.concatenate(list_scream_clean_55, axis=1)
list_scream_clean_65 = [input_scream_clean_65] * 2
input_scream_clean_65_2 = np.concatenate(list_scream_clean_65, axis=1)
input_scream_clean_all = np.concatenate((input_scream_clean_55_2, input_scream_clean_65_2), axis=1)
list_scream = [input_scream_clean_all] * 3
input_scream_clean_all_12 = np.concatenate(list_scream, axis=1)
input_tooth_clean_55 = input_tooth_clean[195312:195312 + 191406, 0:1]
input_tooth_clean_65 = input_tooth_clean[195312:195312 + 191406, 1:2]
list_tooth_clean_55 = [input_tooth_clean_55] * 2
input_tooth_clean_55_2 = np.concatenate(list_tooth_clean_55, axis=1)
list_tooth_clean_65 = [input_tooth_clean_65] * 2
input_tooth_clean_65_2 = np.concatenate(list_tooth_clean_65, axis=1)
input_tooth_clean_all = np.concatenate((input_tooth_clean_55_2, input_tooth_clean_65_2), axis=1)
list_tooth = [input_tooth_clean_all] * 3
input_tooth_clean_all_12 = np.concatenate(list_tooth, axis=1)
# Training target = clean scream (12 cols) + clean tooth (12 cols).
input_train_clean = np.concatenate((input_scream_clean_all_12, input_tooth_clean_all_12), axis=1)
input_train_clean = input_train_clean[:, :, np.newaxis]
list_train = [input_train_clean] * 256
input_train_clean_all = np.concatenate(list_train, axis=2)
input_train_clean_all = np.transpose(input_train_clean_all, (2, 1, 0))
input_train_clean_reshaped = np.zeros(shape=(256 * 24, 191406))
input_train_clean_reshaped = np.reshape(input_train_clean_all, (256 * 24, 191406))
input_train_clean_reshaped = input_train_clean_reshaped[:, ::2]  # decimate by 2 -> 95703 samples
max_input_clean_train = np.max(np.abs(input_train_clean_reshaped))
np.save(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_clean_train',
    max_input_clean_train)
input_clean_train_normalized = np.zeros(shape=(6144, 95703))
input_clean_train_normalized = input_train_clean_reshaped / max_input_clean_train
win = int(95703 / 3)
input_clean_train_windowed = np.zeros((6144, 3, win))
for i in range(6144):
    for j in range(3):
        input_clean_train_windowed[i, j, :] = input_clean_train_normalized[i, j * win:j * win + win]
input_clean_train_windowed_reshaped = np.zeros((6144 * 3, win))
input_clean_train_windowed_reshaped = np.reshape(input_clean_train_windowed, (6144 * 3, win))
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/input_clean_train.h5',
    'w')
f.create_dataset('input_train', data=input_clean_train_windowed_reshaped)
f.close()
# Test target = clean squeal (built above as input_test_clean).
# input_test_clean=input_squeal_clean
input_test_clean = input_test_clean[:, :, np.newaxis]
list_test = [input_test_clean] * 256
input_test_clean_all = np.concatenate(list_test, axis=2)
input_test_clean_all = np.transpose(input_test_clean_all, (2, 1, 0))
input_test_clean_reshaped = np.zeros(shape=(256 * 12, 191406))
input_test_clean_reshaped = np.reshape(input_test_clean_all, (256 * 12, 191406))
input_test_clean_reshaped = input_test_clean_reshaped[:, ::2]
max_input_clean_test = np.max(np.abs(input_test_clean_reshaped))
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized/max_input_clean_test',
        max_input_clean_test)
input_clean_test_normalized = np.zeros(shape=(3072, 95703))
input_clean_test_normalized = input_test_clean_reshaped / max_input_clean_test
win = int(95703 / 3)
input_clean_test_windowed = np.zeros((3072, 3, win))
for i in range(3072):
    for j in range(3):
        input_clean_test_windowed[i, j, :] = input_clean_test_normalized[i, j * win:j * win + win]
input_clean_test_windowed_reshaped = np.zeros((3072 * 3, win))
input_clean_test_windowed_reshaped = np.reshape(input_clean_test_windowed, (3072 * 3, win))
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/time_domain/new_allMatrixNormalized//input_clean_test.h5',
    'w')
f.create_dataset('input_test', data=input_clean_test_windowed_reshaped)
f.close()
########################################### 23-3-2020
# this is the part to use when making gammatone representation of the stimuli and
# also we have to window the spikes with the same method we are windowing the stimuli fo gammatones
from gammatone.gtgram import gtgram
import scipy.io as spio
import numpy as np
mat = spio.loadmat('C:/Users/hoss3301/work/deep_guinea_ears/data/data/data_rawData/in samples/Matlab Data/input.mat')
input_train = mat['input_train'] # shape: (95703,8)
input_test = mat['input_test'] # shape: (95703,3)
input_test = input_test[:, :, np.newaxis, np.newaxis] # shape: 95703,3,1,1,
list_test = [input_test] * 20
input_test = np.concatenate(list_test, axis=2) # shape: 95703,3,20,1,
list_test = [input_test] * 265
input_test = np.concatenate(list_test, axis=3) ##shape: 95703,3,20,265
list_train = [input_train] * 20
input_train = np.concatenate(list_train, axis=2)
input_train = input_train[:, :, np.newaxis, np.newaxis]
list_train = [input_train] * 265
input_train = np.concatenate(list_train, axis=3)
input_test = np.transpose(input_test, (2, 3, 1, 0)) # shape: 20,265,3,95703
input_train = np.transpose(input_train, (2, 3, 1, 0))
input_test_reshaped = np.zeros(shape=(20 * 265 * 3, 95703))
input_test_reshaped = np.reshape(input_test, (20 * 265 * 3, 95703)) # shape: 15900,95703
input_train_reshaped = np.zeros(shape=(20 * 265 * 8, 95703))
input_train_reshaped = np.reshape(input_train, (20 * 265 * 8, 95703))
# input_test_cut=np.zeros(shape=(input_test_reshaped.shape[0],3,int(input_test_reshaped.shape[1]/3)))
# win=int(input_test_reshaped.shape[1]/3)
# for i in range(15900):
# for j in range(3):
# input_test_cut[i,j,:]=input_test_reshaped[i,j*win:j*win+win] #shape: 15900,3,31901
# input_train_cut=np.zeros(shape=(input_train_reshaped.shape[0],3,int(input_train_reshaped.shape[1]/3)))
# win=int(input_train_reshaped.shape[1]/3)
# for i in range(input_train_reshaped.shape[0]):
# for j in range(3):
# input_train_cut[i,j,:]=input_train_reshaped[i,j*win:j*win+win]
# input_test=np.zeros(shape=(15900*3,31901)) #shape: 47700,31901
# input_test=np.reshape(input_test_cut,(15900*3,31901))
# input_train=np.zeros(shape=(input_train_cut.shape[0]*3,31901))
# input_train=np.reshape(input_train_cut,(input_train_cut.shape[0]*3,31901))
n_channels = 128
# add_samples=np.round(0.025*97656.25)
# input_test=
gamma_waves_test = np.zeros(shape=(input_test_reshaped.shape[0], n_channels, 96)) # shape: 15900,128,96
gamma_waves_train = np.zeros(shape=(input_train_reshaped.shape[0], n_channels, 96))
"""
from gammatone.gtgram import gtgram_strides
from gammatone.filters import centre_freqs, make_erb_filters
cfs = centre_freqs(97656.25, 128, 20)
fcoefs = np.flipud(make_erb_filters(97656.25, cfs))
#xf = np.zeros((fcoefs[:,9].shape[0], input_test_reshaped.shape[0],input_test_reshaped.shape[1]))
#xf = np.zeros((fcoefs[:,9].shape[0], c.shape[0], c.shape[1]))
gain = fcoefs[:, 9]
# A0, A11, A2
As1 = fcoefs[:, (0, 1, 5)]
# A0, A12, A2
As2 = fcoefs[:, (0, 2, 5)]
# A0, A13, A2
As3 = fcoefs[:, (0, 3, 5)]
# A0, A14, A2
As4 = fcoefs[:, (0, 4, 5)]
# B0, B1, B2
Bs = fcoefs[:, 6:9]
from scipy import signal as sgn
test_wave= np.zeros((128,15900, 952))
for i in range(100):
print(i)
xf=np.zeros((fcoefs[:,9].shape[0], int(input_test_reshaped.shape[0]/100),input_test_reshaped.shape[1]))
for idx in range(0, fcoefs.shape[0]):
y1 = sgn.lfilter(As1[idx], Bs[idx], input_test_reshaped[i*159:i*159+159])
y2 = sgn.lfilter(As2[idx], Bs[idx], y1)
y3 = sgn.lfilter(As3[idx], Bs[idx], y2)
y4 = sgn.lfilter(As4[idx], Bs[idx], y3)
xf[idx, ::] = y4 / gain[idx]
xe = np.power(xf, 2)
nwin, hop_samples, ncols = gtgram_strides(
97656.25,
0.025,
0.001,
xe.shape[2]
)
for cnum in range(ncols):
segment = xe[:,:, cnum * hop_samples + np.arange(nwin)]
test_wave[:,i*159:i*159+159, cnum] = np.sqrt(segment.mean(2))
"""
# Compute the gammatone spectrogram of every reshaped waveform row:
# fs=97656.25 Hz, 25 ms window, 10 ms hop, 128 channels, fmin=20 Hz.
for i in range(input_test_reshaped.shape[0]):
    gamma_waves_test[i, :, :] = gtgram(input_test_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)  # coarse progress indicator
# BUG FIX: this loop previously ranged over input_train.shape[0], which is 20
# after the transpose above, while indexing input_train_reshaped (42400 rows) —
# so only the first 20 training rows were ever filled. Range over the array
# actually being indexed.
for i in range(input_train_reshaped.shape[0]):
    gamma_waves_train[i, :, :] = gtgram(input_train_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)
# Reorder to (rows, frames, channels) and max-abs normalise each split with
# its own constant (train and test are scaled independently).
gamma_waves_test = np.transpose(gamma_waves_test, (0, 2, 1))  # shape: 15900,96,128
gamma_waves_train = np.transpose(gamma_waves_train, (0, 2, 1))
max_gamma_train = np.max(np.abs(gamma_waves_train))
max_gamma_test = np.max(np.abs(gamma_waves_test))
gamma_train_normalized = gamma_waves_train / max_gamma_train;
gamma_test_normalized = gamma_waves_test / max_gamma_test;
# Persist both the normalisation constants and the normalised features.
np.save('data/original/gammatone/clean/max_gamma_test', max_gamma_test)
np.save('data/original/gammatone/clean/gamma_waves_test', gamma_waves_test)
f = h5py.File('data/original/gammatone/clean/gamma_waves_test_normalized.h5', 'w')
f.create_dataset('gamma_waves_test', data=gamma_test_normalized)
f.close()
np.save('data/original/gammatone/clean/max_gamma_train', max_gamma_train)
np.save('data/original/gammatone/clean/gamma_waves_train', gamma_waves_train)
f = h5py.File('data/original/gammatone/clean/gamma_waves_train_normalized.h5', 'w')
f.create_dataset('gamma_waves_train', data=gamma_train_normalized)
f.close()
# --- Load guinea-pig spike data (all.mat) and split into train/test units ---
import h5py
import numpy as np
arrays = {}
f = h5py.File('C:/Users/hoss3301/work/deep_guinea_ears/data/data/data_rawData/in samples/Matlab Data/all.mat', 'r')
for k, v in f.items():
    arrays[k] = np.array(v)
spikes = arrays['all']  # all has a shape of (11,23928,20,265)
# spikes=np.transpose(spikes,(1,2,3,0))
# Hand-picked split of the 11 stimuli: indices 0, 6, 7 -> test; the rest -> train.
spikes_test = np.zeros(shape=(3, 23928, 20, 265))  # shape of (3,23928,20,265)
spikes_train = np.zeros(shape=(8, 23928, 20, 265))
spikes_test[0:1, :, :, :] = spikes[0:1, :, :, :]
spikes_train[0:5, :, :, :] = spikes[1:6, :, :, :]
spikes_test[1:3, :, :, :] = spikes[6:8, :, :, :]
spikes_train[5:8, :, :, :] = spikes[8:11, :, :, :]
spikes_test = np.transpose(spikes_test, (2, 3, 0, 1))  # shape: 20,265,3,23928
spikes_train = np.transpose(spikes_train, (2, 3, 0, 1))
spikes_test_reshaped = np.zeros(shape=(20 * 265 * 3, 23928))  # shape: 15900,23928
spikes_test_reshaped = np.reshape(spikes_test, (15900, 23928))
spikes_train_reshaped = np.zeros(shape=(20 * 265 * 8, 23928))  # shape: 42400,23928
spikes_train_reshaped = np.reshape(spikes_train, (20 * 265 * 8, 23928))
# spikes_test_cut=np.zeros(shape=(spikes_test_reshaped.shape[0],3,int(spikes_test_reshaped.shape[1]/3)))
# win=int(spikes_test_reshaped.shape[1]/3)
# for i in range(15900):
#    for j in range(3):
#        spikes_test_cut[i,j,:]=spikes_test_reshaped[i,j*win:j*win+win] #shape: 15900,3,7976
#
# spikes_train_cut=np.zeros(shape=(spikes_train_reshaped.shape[0],3,int(spikes_train_reshaped.shape[1]/3)))
# win=int(spikes_train_reshaped.shape[1]/3)
# for i in range(spikes_train_reshaped.shape[0]):
#    for j in range(3):
#        spikes_train_cut[i,j,:]=spikes_train_reshaped[i,j*win:j*win+win]
# spikes_test=np.zeros(shape=(15900*3,7976)) #shape: 47700,7976
# spikes_test=np.reshape(spikes_test_cut,(15900*3,7976))
# spikes_train=np.zeros(shape=(spikes_train_cut.shape[0]*3,7976))
# spikes_train=np.reshape(input_train_cut,(spikes_train_cut.shape[0]*3,7976))
# Window geometry matched to the stimulus gtgram (25 ms / 10 ms) but at the
# spike sampling rate 24414.4 Hz, so spike frames align with gammatone frames.
from gammatone.gtgram import gtgram_strides
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,
    0.025,
    0.01,
    spikes_test_reshaped.shape[1]
)
# Mean spike rate per gammatone-aligned window, computed for all 15900 rows at
# once (vectorised over axis 0).
spikes_test_windowed = np.zeros((spikes_test_reshaped.shape[0], ncols))
# BUG FIX: removed a stray `print(i)` left over from a commented-out loop — it
# printed a stale loop variable from an earlier section (or raised NameError
# when this section ran on its own).
for cnum in range(ncols):
    segment = spikes_test_reshaped[:, cnum * hop_samples + np.arange(nwin)]  # shape: 15900,96
    spikes_test_windowed[:, cnum] = segment.mean(1)
spikes_test_windowed = spikes_test_windowed[:, :, np.newaxis]  # 15900,96,1
# Replicate the windowed rates across the 128 gammatone channels.
list_test = [spikes_test_windowed] * 128
spikes_test = np.concatenate(list_test, axis=2)  # 15900,96,128
max_spikes_test = np.max(np.abs(spikes_test))
spikes_test_normalized = spikes_test / max_spikes_test
np.save('data/original/gammatone/clean/max_spikes_test', max_spikes_test)
f = h5py.File('data/original/gammatone/clean/spikes_test_normalized.h5', 'w')
f.create_dataset('spikes_test', data=spikes_test_normalized)
# BUG FIX: close the file so the dataset is flushed to disk; the original
# rebound `f` later without ever closing this handle.
f.close()
# Same windowing for the training spikes (42400 rows).
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,
    0.025,
    0.01,
    spikes_train_reshaped.shape[1]
)
spikes_train_windowed = np.zeros((spikes_train_reshaped.shape[0], ncols))
for cnum in range(ncols):
    segment = spikes_train_reshaped[:, cnum * hop_samples + np.arange(nwin)]  # shape: 42400,96
    spikes_train_windowed[:, cnum] = segment.mean(1)
spikes_train_windowed = spikes_train_windowed[:, :, np.newaxis]  # 42400,96,1
list_train = [spikes_train_windowed] * 128
spikes_train = np.concatenate(list_train, axis=2)  # 42400,96,128
max_spikes_train = np.max(np.abs(spikes_train))
spikes_train_normalized = spikes_train / max_spikes_train
# BUG FIX: this previously re-saved max_spikes_test under the test filename, so
# the training normalisation constant was never persisted.
np.save('data/original/gammatone/clean/max_spikes_train', max_spikes_train)
f = h5py.File('data/original/gammatone/clean/spikes_train_normalized.h5', 'w')
f.create_dataset('spikes_train', data=spikes_train_normalized)
f.close()
###my data
# Build the noisy training stimulus (scream + tooth), tile per electrode, and
# compute its gammatone spectrogram.
import scipy.io as sio
import numpy as np
path = 'D:/data/ICC/stim_orig.mat'
mat = sio.loadmat(path)
input_all = mat['original_stim']
input_scream_noisy = input_all[:, 0:12]
input_squeal_noisy = input_all[:, 24:36]
input_test_noisy = input_squeal_noisy
input_tooth_noisy = input_all[:, 48:60]
input_train_noisy = np.concatenate((input_scream_noisy, input_tooth_noisy), axis=1)
input_scream_clean = input_all[:, 72:74]
input_squeal_clean = input_all[:, 74:76]
input_tooth_clean = input_all[:, 76:78]
# train data: crop to 191406 samples, decimate by 2, tile across 256 electrodes.
input_train_noisy = input_train_noisy[:, :, np.newaxis]  # 407070,24,1
input_train_noisy = input_train_noisy[195312:195312 + 191406, :, :]
input_train_noisy = input_train_noisy[::2, :, :]  # 95703,24,1
list_train = [input_train_noisy] * 256
train_input_noisy = np.concatenate(list_train, axis=2)  # 95703,24,256
# BUG FIX: the transpose below previously operated on the un-tiled
# `input_train_noisy` (95703,24,1), so the reshape to (256*24, 95703) raised a
# ValueError (size mismatch). Transpose the tiled `train_input_noisy` instead.
input_train_noisy = np.transpose(train_input_noisy, (2, 1, 0))  # 256,24,95703
input_train_noisy_reshaped = np.reshape(input_train_noisy, (256 * 24, 95703))
n_channels = 128
# 96 gtgram frames per 95703-sample row (25 ms window, 10 ms hop, fs=97656.25).
gamma_waves_train = np.zeros(shape=(input_train_noisy_reshaped.shape[0], n_channels, 96))
from gammatone.gtgram import gtgram
for i in range(input_train_noisy_reshaped.shape[0]):
    gamma_waves_train[i, :, :] = gtgram(input_train_noisy_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)  # coarse progress indicator
# test data noisy: same pipeline as the train branch, for the squeal stimulus.
input_test_noisy = input_test_noisy[:, :, np.newaxis]  # 47070,24,1
input_test_noisy = input_test_noisy[195312:195312 + 191406, :, :]
# input_train_noisy=input_train_noisy[195312:195312+191406,:,:,:]
# input_train_noisy=input_train_noisy[::2,:,:,:]
input_test_noisy = input_test_noisy[::2, :, :]  # 95703,24,1  (decimate by 2)
list_test = [input_test_noisy] * 256
# train_input_noisy=np.concatenate(list_train,axis=3)
input_test_noisy = np.concatenate(list_test, axis=2)  # 95703,24,256
# list_train=[train_input_noisy]*15
# input_train_noisy=np.concatenate(list_train,axis=2)
input_test_noisy = np.transpose(input_test_noisy, (2, 1, 0))  # 256,24,95703
# input_train_noisy=np.transpose(input_train_noisy,(2,3,1,0))
# input_train_noisy_reshaped=np.zeros(shape=(256*24*15,95703))
# input_train_noisy_reshaped=np.reshape(input_train_noisy,(256*24*15,95703))
input_test_noisy_reshaped = np.zeros(shape=(256 * 12, 95703))
input_test_noisy_reshaped = np.reshape(input_test_noisy, (256 * 12, 95703))
n_channels = 128
gamma_waves_test = np.zeros(shape=(input_test_noisy_reshaped.shape[0], n_channels, 96))
from gammatone.gtgram import gtgram
# Gammatone spectrogram per row: fs=97656.25, 25 ms window, 10 ms hop, fmin=20 Hz.
for i in range(input_test_noisy_reshaped.shape[0]):
    gamma_waves_test[i, :, :] = gtgram(input_test_noisy_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)
# Reorder to (rows, frames, channels), normalise each split independently and save.
gamma_waves_test = np.transpose(gamma_waves_test, (0, 2, 1))  # 3072,96,128
gamma_waves_train = np.transpose(gamma_waves_train, (0, 2, 1))  # 6144,96,128
max_gamma_train = np.max(np.abs(gamma_waves_train))
max_gamma_test = np.max(np.abs(gamma_waves_test))
gamma_train_noisy_normalized = gamma_waves_train / max_gamma_train;
gamma_test_noisy_normalized = gamma_waves_test / max_gamma_test;
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_gamma_test_noisy', max_gamma_test)
# np.save('data/original/gammatone/noisy/gamma_waves_test',gamma_waves_test)
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/gamma_waves_test_noisy_normalized.h5', 'w')
f.create_dataset('gamma_waves_test', data=gamma_test_noisy_normalized)
f.close()
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_gamma_train_noisy', max_gamma_train)
# np.save('data/original/gammatone/clean/gamma_waves_train',gamma_waves_train)
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/gamma_waves_train_noisy_normalized.h5', 'w')
f.create_dataset('gamma_waves_train', data=gamma_train_noisy_normalized)
f.close()
# --- Clean stimuli gammatone features (train: scream+tooth, test: squeal) ---
# Each clean level is duplicated x2 and tiled x3 to make 12 columns, matching
# the 12 noisy variants per call type.
input_scream_clean_55 = input_scream_clean[195312:195312 + 191406, 0:1]
input_scream_clean_65 = input_scream_clean[195312:195312 + 191406, 1:2]
list_scream_clean_55 = [input_scream_clean_55] * 2
input_scream_clean_55_2 = np.concatenate(list_scream_clean_55, axis=1)
list_scream_clean_65 = [input_scream_clean_65] * 2
input_scream_clean_65_2 = np.concatenate(list_scream_clean_65, axis=1)
input_scream_clean_all = np.concatenate((input_scream_clean_55_2, input_scream_clean_65_2), axis=1)
list_scream = [input_scream_clean_all] * 3
input_scream_clean_all_12 = np.concatenate(list_scream, axis=1)
input_tooth_clean_55 = input_tooth_clean[195312:195312 + 191406, 0:1]
input_tooth_clean_65 = input_tooth_clean[195312:195312 + 191406, 1:2]
list_tooth_clean_55 = [input_tooth_clean_55] * 2
input_tooth_clean_55_2 = np.concatenate(list_tooth_clean_55, axis=1)
list_tooth_clean_65 = [input_tooth_clean_65] * 2
input_tooth_clean_65_2 = np.concatenate(list_tooth_clean_65, axis=1)
input_tooth_clean_all = np.concatenate((input_tooth_clean_55_2, input_tooth_clean_65_2), axis=1)
list_tooth = [input_tooth_clean_all] * 3
input_tooth_clean_all_12 = np.concatenate(list_tooth, axis=1)
input_train_clean = np.concatenate((input_scream_clean_all_12, input_tooth_clean_all_12), axis=1)
input_train_clean = input_train_clean[:, :, np.newaxis]  # 191406,24,1
list_train = [input_train_clean] * 256
input_train_clean_all = np.concatenate(list_train, axis=2)
input_train_clean_all = np.transpose(input_train_clean_all, (2, 1, 0))
input_train_clean_all = input_train_clean_all[::2, :, :]  # 95703,24,256
input_train_clean_reshaped = np.zeros(shape=(256 * 24, 95703))
input_train_clean_reshaped = np.reshape(input_train_clean_all, (256 * 24, 95703))  # 6144,95703
n_channels = 128
gamma_waves_train_clean = np.zeros(shape=(input_train_clean_reshaped.shape[0], n_channels, 96))
from gammatone.gtgram import gtgram
# Gammatone spectrogram per row: fs=97656.25, 25 ms window, 10 ms hop, fmin=20 Hz.
for i in range(input_train_clean_reshaped.shape[0]):
    gamma_waves_train_clean[i, :, :] = gtgram(input_train_clean_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)
# Test target: clean squeal, decimated before tiling (train decimates after).
input_squeal_clean_55 = input_squeal_clean[195312:195312 + 191406, 0:1]  # 191406,1
input_squeal_clean_65 = input_squeal_clean[195312:195312 + 191406, 1:2]
list_squeal_clean_55 = [input_squeal_clean_55] * 2
input_squeal_clean_55_2 = np.concatenate(list_squeal_clean_55, axis=1)
list_squeal_clean_65 = [input_squeal_clean_65] * 2
input_squeal_clean_65_2 = np.concatenate(list_squeal_clean_65, axis=1)
input_squeal_clean_all = np.concatenate((input_squeal_clean_55_2, input_squeal_clean_65_2), axis=1)
# 191406,4
list_squeal = [input_squeal_clean_all] * 3
input_test_clean = np.concatenate(list_squeal, axis=1)  # 191406,12
input_test_clean = input_test_clean[::2, :]  # 95703,12
input_test_clean = input_test_clean[:, :, np.newaxis]  # 95703,12,1
list_test = [input_test_clean] * 256
input_test_clean_all = np.concatenate(list_test, axis=2)
input_test_clean_all = np.transpose(input_test_clean_all, (2, 1, 0))  # 256,12,95703
input_test_clean_reshaped = np.zeros(shape=(256 * 12, 95703))
input_test_clean_reshaped = np.reshape(input_test_clean_all, (256 * 12, 95703))  # 3072,95703
n_channels = 128
gamma_waves_test_clean = np.zeros(shape=(input_test_clean_reshaped.shape[0], n_channels, 96))
from gammatone.gtgram import gtgram
for i in range(input_test_clean_reshaped.shape[0]):
    gamma_waves_test_clean[i, :, :] = gtgram(input_test_clean_reshaped[i, :], 97656.25, 0.025, 0.01, n_channels, 20)
    if i % 10 == 0:
        print(i)
# Reorder, normalise each split independently, and persist constants + features.
gamma_waves_test_clean = np.transpose(gamma_waves_test_clean, (0, 2, 1))  # shape: 3072,96,128
gamma_waves_train_clean = np.transpose(gamma_waves_train_clean, (0, 2, 1))  # 6144,96,128
max_gamma_train = np.max(np.abs(gamma_waves_train_clean))
max_gamma_test = np.max(np.abs(gamma_waves_test_clean))
gamma_train_clean_normalized = gamma_waves_train_clean / max_gamma_train;
gamma_test_clean_normalized = gamma_waves_test_clean / max_gamma_test;
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_gamma_test_clean', max_gamma_test)
# np.save('data/original/gammatone/noisy/gamma_waves_test',gamma_waves_test)
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/gamma_waves_test_clean_normalized.h5', 'w')
f.create_dataset('gamma_waves_test', data=gamma_test_clean_normalized)
f.close()
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_gamma_train_clean', max_gamma_train)
# np.save('data/original/gammatone/clean/gamma_waves_train',gamma_waves_train)
f = h5py.File(
    'D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/gamma_waves_train_clean_normalized.h5', 'w')
f.create_dataset('gamma_waves_train', data=gamma_train_clean_normalized)
f.close()
# --- Noisy spikes, windowed at the gammatone frame rate ----------------------
import numpy as np
import scipy.io as spio
import h5py
arrays = {}
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_scream.mat')
for k, v in f.items():
    arrays[k] = np.array(v)
spikes_scream = arrays['Raw_all_noisy_scream']  # size=35160,40,256,12
spikes_scream_mean = np.mean(spikes_scream, 1)  # average 40 trials -> 35160,256,12
spikes_scream_truncated = spikes_scream_mean[0:23928, :, :]  # 23928,256,12
spikes_scream_truncated = np.transpose(spikes_scream_truncated, (1, 2, 0))  # 256,12,23928
arrays = {}
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_tooth.mat')
for k, v in f.items():
    arrays[k] = np.array(v)
spikes_tooth = arrays['Raw_all_noisy_tooth']  # size=35160,40,256,12
spikes_tooth_mean = np.mean(spikes_tooth, 1)
spikes_tooth_truncated = spikes_tooth_mean[0:23928, :, :]
spikes_tooth_truncated = np.transpose(spikes_tooth_truncated, (1, 2, 0))
spikes_concat = np.concatenate((spikes_scream_truncated, spikes_tooth_truncated), axis=1)
spikes_noisy_train = np.zeros(shape=(256 * 24, 23928))
spikes_noisy_train = np.reshape(spikes_concat, (256 * 24, 23928))
# Windowing matched to the stimulus gtgram (25 ms / 10 ms) at the spike
# sampling rate 24414.4 Hz so spike frames align with gammatone frames.
from gammatone.gtgram import gtgram_strides
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,
    0.025,
    0.01,
    spikes_noisy_train.shape[1]
)
spikes_noisy_train_windowed = np.zeros((spikes_noisy_train.shape[0], ncols))
for cnum in range(ncols):
    segment = spikes_noisy_train[:, cnum * hop_samples + np.arange(nwin)]  # shape: 6144,96
    spikes_noisy_train_windowed[:, cnum] = segment.mean(1)
spikes_noisy_train_windowed = spikes_noisy_train_windowed[:, :, np.newaxis]  # 6144,96,1
# Replicate the windowed rates across the 128 gammatone channels.
list_train = [spikes_noisy_train_windowed] * 128
spikes_noisy_train = np.concatenate(list_train, axis=2)  # 6144,96,128
max_spikes_train = np.max(np.abs(spikes_noisy_train))
spikes_train_normalized = spikes_noisy_train / max_spikes_train
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_spikes_train', max_spikes_train)
f = h5py.File('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/spikes_noisy_train_normalized.h5',
              'w')
f.create_dataset('spikes_train', data=spikes_train_normalized)
f.close()
f = h5py.File('D:/data/ICC/spikes/RawData/Raw_all_noisy_squeal.mat')
arrays = {}
for k, v in f.items():
    arrays[k] = np.array(v)
# test
spikes_squeal = arrays['Raw_all_noisy_squeal']  # size=35160,40,256,12
spikes_squeal_mean = np.mean(spikes_squeal, 1)  # 35160,256,12
spikes_squeal_truncated = spikes_squeal_mean[0:23928, :, :]  # 23928,256,12
spikes_squeal_truncated = np.transpose(spikes_squeal_truncated, (1, 2, 0))  # 256,12,23928
spikes_noisy_test = np.zeros(shape=(256 * 12, 23928))
spikes_noisy_test = np.reshape(spikes_squeal_truncated, (256 * 12, 23928))
from gammatone.gtgram import gtgram_strides
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,
    0.025,
    0.01,
    spikes_noisy_test.shape[1]
)
spikes_noisy_test_windowed = np.zeros((spikes_noisy_test.shape[0], ncols))
for cnum in range(ncols):
    segment = spikes_noisy_test[:, cnum * hop_samples + np.arange(nwin)]  # shape: 3072,96
    spikes_noisy_test_windowed[:, cnum] = segment.mean(1)
spikes_noisy_test_windowed = spikes_noisy_test_windowed[:, :, np.newaxis]  # 3072,96,1
list_test = [spikes_noisy_test_windowed] * 128
spikes_noisy_test = np.concatenate(list_test, axis=2)  # 3072,96,128
max_spikes_test = np.max(np.abs(spikes_noisy_test))
spikes_test_normalized = spikes_noisy_test / max_spikes_test
np.save('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/max_spikes_test', max_spikes_test)
f = h5py.File('D:/data_workFolder/TrialsOfNeuralVocalRecon/original/gammatone/noisy/spikes_noisy_test_normalized.h5',
              'w')
f.create_dataset('spikes_test', data=spikes_test_normalized)
f.close()
arrays = {}
f = h5py.File('C:/Users/hoss3301/work/deep_guinea_ears/data/data/data_rawData/in samples/Matlab Data/all.mat', 'r')
for k, v in f.items():
arrays[k] = np.array(v)
spikes = arrays['all'] # all has a shape of (11,23928,20,265)
# spikes=np.transpose(spikes,(1,2,3,0))
spikes_test = np.zeros(shape=(3, 23928, 20, 265)) # shape of (3,23928,20,265)
spikes_train = np.zeros(shape=(8, 23928, 20, 265))
spikes_test[0:1, :, :, :] = spikes[0:1, :, :, :]
spikes_train[0:5, :, :, :] = spikes[1:6, :, :, :]
spikes_test[1:3, :, :, :] = spikes[6:8, :, :, :]
spikes_train[5:8, :, :, :] = spikes[8:11, :, :, :]
spikes_test = np.transpose(spikes_test, (2, 3, 0, 1)) # shape: 20,265,3,23928
spikes_train = np.transpose(spikes_train, (2, 3, 0, 1))
spikes_test_reshaped = np.zeros(shape=(20 * 265 * 3, 23928)) # shape: 15900,23928
spikes_test_reshaped = np.reshape(spikes_test, (15900, 23928))
spikes_train_reshaped = np.zeros(shape=(20 * 265 * 8, 23928)) # shape: 42400,23928
spikes_train_reshaped = np.reshape(spikes_train, (20 * 265 * 8, 23928))
# Window the clean test spikes with the gammatone frame geometry, tile to
# 128 channels, normalise, and persist the scale factor plus the data.
from gammatone.gtgram import gtgram_strides
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,  # sampling rate (Hz)
    0.025,    # window length (s)
    0.01,     # hop (s)
    spikes_test_reshaped.shape[1]
)
spikes_test_windowed = np.zeros((spikes_test_reshaped.shape[0], ncols))
# fix: removed a stray `print(i)` left over from a commented-out outer loop;
# `i` was undefined at this point and raised a NameError at runtime.
for cnum in range(ncols):
    segment = spikes_test_reshaped[:, cnum * hop_samples + np.arange(nwin)]  # shape: 15900,96
    spikes_test_windowed[:, cnum] = segment.mean(1)
spikes_test_windowed = spikes_test_windowed[:, :, np.newaxis]  # 15900,96,1
list_test = [spikes_test_windowed] * 128
spikes_test = np.concatenate(list_test, axis=2)  # 15900,96,128
max_spikes_test = np.max(np.abs(spikes_test))
spikes_test_normalized = spikes_test / max_spikes_test
np.save('data/original/gammatone/clean/max_spikes_test', max_spikes_test)
f = h5py.File('data/original/gammatone/clean/spikes_test_normalized.h5', 'w')
f.create_dataset('spikes_test', data=spikes_test_normalized)
f.close()  # fix: this file was previously never closed
# Same windowing/tiling/normalisation for the training split.
nwin, hop_samples, ncols = gtgram_strides(
    24414.4,  # sampling rate (Hz)
    0.025,    # window length (s)
    0.01,     # hop (s)
    spikes_train_reshaped.shape[1]
)
spikes_train_windowed = np.zeros((spikes_train_reshaped.shape[0], ncols))
for cnum in range(ncols):
    segment = spikes_train_reshaped[:, cnum * hop_samples + np.arange(nwin)]  # shape: 42400,96
    spikes_train_windowed[:, cnum] = segment.mean(1)
spikes_train_windowed = spikes_train_windowed[:, :, np.newaxis]  # 42400,96,1
list_train = [spikes_train_windowed] * 128
spikes_train = np.concatenate(list_train, axis=2)  # 42400,96,128
max_spikes_train = np.max(np.abs(spikes_train))
spikes_train_normalized = spikes_train / max_spikes_train
# fix: the original re-saved max_spikes_test under the test filename here,
# so the train normalisation factor was never persisted and could not be
# used later to undo the scaling.
np.save('data/original/gammatone/clean/max_spikes_train', max_spikes_train)
f = h5py.File('data/original/gammatone/clean/spikes_train_normalized.h5', 'w')
f.create_dataset('spikes_train', data=spikes_train_normalized)
f.close()
| 37.840206
| 120
| 0.740514
| 10,599
| 66,069
| 4.29399
| 0.027927
| 0.039989
| 0.020039
| 0.014743
| 0.947816
| 0.913759
| 0.87322
| 0.84035
| 0.810709
| 0.770258
| 0
| 0.085346
| 0.11824
| 66,069
| 1,745
| 121
| 37.861891
| 0.695882
| 0.088998
| 0
| 0.729358
| 0
| 0.00367
| 0.121621
| 0.104045
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002752
| false
| 0
| 0.037615
| 0
| 0.043119
| 0.007339
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48c350d755033bccc52a5b3342077de6c3384999
| 1,676
|
py
|
Python
|
networkCalc.py
|
z1pti3/jimiPlugin-networkCalc
|
dcd543d2b6ab7fef8566fe33fc033f292d80f8a7
|
[
"Apache-2.0"
] | null | null | null |
networkCalc.py
|
z1pti3/jimiPlugin-networkCalc
|
dcd543d2b6ab7fef8566fe33fc033f292d80f8a7
|
[
"Apache-2.0"
] | null | null | null |
networkCalc.py
|
z1pti3/jimiPlugin-networkCalc
|
dcd543d2b6ab7fef8566fe33fc033f292d80f8a7
|
[
"Apache-2.0"
] | null | null | null |
import jimi
class _networkCalc(jimi.plugin._plugin):
    """networkCalc plugin: (de)registers its calculator/lookup action models."""

    version = 0.1

    # (model name, class name) pairs for every action model this plugin owns.
    # All of them live in the same module and share the "_action" base.
    _MODELS = (
        ("networkCalcSubnetCalculator", "_networkCalcSubnetCalculator"),
        ("networkCalcBinaryCalculator", "_networkCalcBinaryCalculator"),
        ("networkCalcGetCertificate", "_networkCalcGetCertificate"),
        ("networkCalcDNSLookup", "_networkCalcDNSLookup"),
        ("networkCalcWHOISLookup", "_networkCalcWHOISLookup"),
    )

    def install(self):
        # Register every action model under the plugin's model module.
        for model_name, class_name in self._MODELS:
            jimi.model.registerModel(model_name, class_name, "_action", "plugins.networkCalc.models.action")
        return True

    def uninstall(self):
        # Remove exactly the models that install() registered.
        for model_name, class_name in self._MODELS:
            jimi.model.deregisterModel(model_name, class_name, "_action", "plugins.networkCalc.models.action")
        return True

    def upgrade(self, LatestPluginVersion):
        # No migrations defined yet (nothing to do below version 0.2).
        return True
| 62.074074
| 142
| 0.766706
| 135
| 1,676
| 9.355556
| 0.22963
| 0.071259
| 0.190024
| 0.23753
| 0.834521
| 0.834521
| 0.834521
| 0.834521
| 0.745843
| 0.147268
| 0
| 0.002686
| 0.111575
| 1,676
| 26
| 143
| 64.461538
| 0.845534
| 0.032816
| 0
| 0.157895
| 0
| 0
| 0.552876
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.052632
| 0.052632
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5b195fe57c2cf51198b3961caaa8da974eaee15e
| 12
|
py
|
Python
|
_draft/x_5_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
_draft/x_5_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | 1
|
2021-11-13T08:03:04.000Z
|
2021-11-13T08:03:04.000Z
|
_draft/x_5_7.py
|
ofl/kuku2
|
7247fb1862d917d23258ebe7a93dca5939433225
|
[
"MIT"
] | null | null | null |
# x_5_7
#
#
| 3
| 7
| 0.416667
| 3
| 12
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.333333
| 12
| 3
| 8
| 4
| 0.125
| 0.416667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d2bd035f7fe4f8ee6d13f2cf8c67570d6e7e1c18
| 20,754
|
py
|
Python
|
sdk/purview/azure-purview-account/azure/purview/account/aio/operations/_collections_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1
|
2021-09-16T02:33:52.000Z
|
2021-09-16T02:33:52.000Z
|
sdk/purview/azure-purview-account/azure/purview/account/aio/operations/_collections_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1
|
2019-08-05T19:14:28.000Z
|
2019-08-05T19:30:05.000Z
|
sdk/purview/azure-purview-account/azure/purview/account/aio/operations/_collections_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1
|
2016-04-19T22:15:47.000Z
|
2016-04-19T22:15:47.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from json import loads as _loads
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from ...operations._collections_operations import build_create_or_update_collection_request, build_delete_collection_request, build_get_collection_path_request, build_get_collection_request, build_list_child_collection_names_request, build_list_collections_request
# Generic type variable for the deserialized response body.
T = TypeVar('T')
# Optional caller-supplied callback applied to
# (pipeline response, deserialized body, response headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class CollectionsOperations:
    """CollectionsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE(review): AutoRest-generated code ("changes may cause incorrect
    # behavior and will be lost if the code is regenerated") -- comments only.

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Collaborators are injected by the generated client; this class keeps
        # no state beyond these references.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace_async
    async def get_collection(
        self,
        collection_name: str,
        **kwargs: Any
    ) -> Any:
        """Get a collection.

        :param collection_name:
        :type collection_name: str
        :return: JSON object
        :rtype: Any
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "collectionProvisioningState": "str (optional)",
                    "description": "str (optional)",
                    "friendlyName": "str (optional)",
                    "name": "str (optional)",
                    "parentCollection": {
                        "referenceName": "str (optional)",
                        "type": "str (optional)"
                    },
                    "systemData": {
                        "createdAt": "datetime (optional)",
                        "createdBy": "str (optional)",
                        "createdByType": "str (optional)",
                        "lastModifiedAt": "datetime (optional)",
                        "lastModifiedBy": "str (optional)",
                        "lastModifiedByType": "str (optional)"
                    }
                }
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        # Map auth/404/409 status codes to the matching azure-core exceptions;
        # callers may extend/override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_collection_request(
            collection_name=collection_name,
            template_url=self.get_collection.metadata['url'],
        )
        # The account endpoint is substituted verbatim (skip_quote) into the URL.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)
        pipeline_response = await self._client.send_request(request, stream=False, _return_pipeline_response=True, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            # Give the caller-supplied callback the raw pipeline response too.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_collection.metadata = {'url': '/collections/{collectionName}'}  # type: ignore

    @distributed_trace_async
    async def create_or_update_collection(
        self,
        collection_name: str,
        collection: Any,
        **kwargs: Any
    ) -> Any:
        """Creates or updates a collection entity.

        :param collection_name:
        :type collection_name: str
        :param collection:
        :type collection: Any
        :return: JSON object
        :rtype: Any
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # JSON input template you can fill out and use as your body input.
                collection = {
                    "collectionProvisioningState": "str (optional)",
                    "description": "str (optional)",
                    "friendlyName": "str (optional)",
                    "name": "str (optional)",
                    "parentCollection": {
                        "referenceName": "str (optional)",
                        "type": "str (optional)"
                    },
                    "systemData": {
                        "createdAt": "datetime (optional)",
                        "createdBy": "str (optional)",
                        "createdByType": "str (optional)",
                        "lastModifiedAt": "datetime (optional)",
                        "lastModifiedBy": "str (optional)",
                        "lastModifiedByType": "str (optional)"
                    }
                }

                # response body for status code(s): 200
                response.json() == {
                    "collectionProvisioningState": "str (optional)",
                    "description": "str (optional)",
                    "friendlyName": "str (optional)",
                    "name": "str (optional)",
                    "parentCollection": {
                        "referenceName": "str (optional)",
                        "type": "str (optional)"
                    },
                    "systemData": {
                        "createdAt": "datetime (optional)",
                        "createdBy": "str (optional)",
                        "createdByType": "str (optional)",
                        "lastModifiedAt": "datetime (optional)",
                        "lastModifiedBy": "str (optional)",
                        "lastModifiedByType": "str (optional)"
                    }
                }
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        # The request body is the caller's JSON-serializable dict, passed through as-is.
        json = collection
        request = build_create_or_update_collection_request(
            collection_name=collection_name,
            content_type=content_type,
            json=json,
            template_url=self.create_or_update_collection.metadata['url'],
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)
        pipeline_response = await self._client.send_request(request, stream=False, _return_pipeline_response=True, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update_collection.metadata = {'url': '/collections/{collectionName}'}  # type: ignore

    @distributed_trace_async
    async def delete_collection(
        self,
        collection_name: str,
        **kwargs: Any
    ) -> None:
        """Deletes a Collection entity.

        :param collection_name:
        :type collection_name: str
        :return: None
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_collection_request(
            collection_name=collection_name,
            template_url=self.delete_collection.metadata['url'],
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)
        pipeline_response = await self._client.send_request(request, stream=False, _return_pipeline_response=True, **kwargs)
        response = pipeline_response.http_response
        # A successful delete returns 204 No Content.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if cls:
            return cls(pipeline_response, None, {})
    delete_collection.metadata = {'url': '/collections/{collectionName}'}  # type: ignore

    @distributed_trace
    def list_collections(
        self,
        *,
        skip_token: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable[Any]:
        """List the collections in the account.

        :keyword skip_token:
        :paramtype skip_token: str
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.async_paging.AsyncItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "count": "long (optional)",
                    "nextLink": "str (optional)",
                    "value": [
                        {
                            "collectionProvisioningState": "str (optional)",
                            "description": "str (optional)",
                            "friendlyName": "str (optional)",
                            "name": "str (optional)",
                            "parentCollection": {
                                "referenceName": "str (optional)",
                                "type": "str (optional)"
                            },
                            "systemData": {
                                "createdAt": "datetime (optional)",
                                "createdBy": "str (optional)",
                                "createdByType": "str (optional)",
                                "lastModifiedAt": "datetime (optional)",
                                "lastModifiedBy": "str (optional)",
                                "lastModifiedByType": "str (optional)"
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Builds the request for either the first page (no next_link) or a
        # continuation page (next_link used as the template URL).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_collections_request(
                    skip_token=skip_token,
                    template_url=self.list_collections.metadata['url'],
                )._to_pipeline_transport_request()
                path_format_arguments = {
                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)
            else:
                request = build_list_collections_request(
                    skip_token=skip_token,
                    template_url=next_link,
                )._to_pipeline_transport_request()
                path_format_arguments = {
                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)
            # NOTE(review): this trailing recomputation of path_format_arguments
            # is never used -- request.url was already formatted in both branches
            # above (generated-code artifact).
            path_format_arguments = {
                "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            request.method = "GET"
            return request

        # Extracts (next page link, items) from one page's response body.
        async def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), AsyncList(list_of_elem)

        # Fetches one page, raising on any non-200 status.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_collections.metadata = {'url': '/collections'}  # type: ignore

    @distributed_trace
    def list_child_collection_names(
        self,
        collection_name: str,
        *,
        skip_token: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable[Any]:
        """Lists the child collections names in the collection.

        :param collection_name:
        :type collection_name: str
        :keyword skip_token:
        :paramtype skip_token: str
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.async_paging.AsyncItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "count": "long (optional)",
                    "nextLink": "str (optional)",
                    "value": [
                        {
                            "friendlyName": "str (optional)",
                            "name": "str (optional)"
                        }
                    ]
                }
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # First page vs. continuation page, as in list_collections above.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_child_collection_names_request(
                    collection_name=collection_name,
                    skip_token=skip_token,
                    template_url=self.list_child_collection_names.metadata['url'],
                )._to_pipeline_transport_request()
                path_format_arguments = {
                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)
            else:
                request = build_list_child_collection_names_request(
                    collection_name=collection_name,
                    skip_token=skip_token,
                    template_url=next_link,
                )._to_pipeline_transport_request()
                path_format_arguments = {
                    "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)
            # NOTE(review): unused recomputation (generated-code artifact), see
            # list_collections.prepare_request.
            path_format_arguments = {
                "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            }
            request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_child_collection_names.metadata = {'url': '/collections/{collectionName}/getChildCollectionNames'}  # type: ignore

    @distributed_trace_async
    async def get_collection_path(
        self,
        collection_name: str,
        **kwargs: Any
    ) -> Any:
        """Gets the parent name and parent friendly name chains that represent the collection path.

        :param collection_name:
        :type collection_name: str
        :return: JSON object
        :rtype: Any
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "parentFriendlyNameChain": [
                        "str (optional)"
                    ],
                    "parentNameChain": [
                        "str (optional)"
                    ]
                }
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_collection_path_request(
            collection_name=collection_name,
            template_url=self.get_collection_path.metadata['url'],
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)
        pipeline_response = await self._client.send_request(request, stream=False, _return_pipeline_response=True, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if response.content:
            deserialized = response.json()
        else:
            deserialized = None
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_collection_path.metadata = {'url': '/collections/{collectionName}/getCollectionPath'}  # type: ignore
| 39.911538
| 264
| 0.569095
| 1,862
| 20,754
| 6.11493
| 0.119227
| 0.044441
| 0.031618
| 0.023186
| 0.819867
| 0.800369
| 0.772791
| 0.748814
| 0.741613
| 0.721412
| 0
| 0.006318
| 0.328852
| 20,754
| 519
| 265
| 39.988439
| 0.811114
| 0.14802
| 0
| 0.722008
| 0
| 0
| 0.053292
| 0.030751
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019305
| false
| 0
| 0.046332
| 0
| 0.127413
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d2d3f75aded4416d1d4487b2ffa8dc2920678742
| 50,670
|
py
|
Python
|
sdk/python/build/lib/pulumi_databricks/databricks/cluster.py
|
ingenii-solutions/pulumi-databricks
|
f03ecc4e190a4e59eb635663f6408350dcab42ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-12-10T07:35:59.000Z
|
2022-03-23T22:53:55.000Z
|
sdk/python/pulumi_databricks/databricks/cluster.py
|
ingenii-solutions/pulumi-databricks
|
f03ecc4e190a4e59eb635663f6408350dcab42ea
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/databricks/cluster.py
|
ingenii-solutions/pulumi-databricks
|
f03ecc4e190a4e59eb635663f6408350dcab42ea
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterArgs', 'Cluster']
@pulumi.input_type
class ClusterArgs:
def __init__(__self__, *,
spark_version: pulumi.Input[str],
autoscale: Optional[pulumi.Input['ClusterAutoscaleArgs']] = None,
autotermination_minutes: Optional[pulumi.Input[int]] = None,
aws_attributes: Optional[pulumi.Input['ClusterAwsAttributesArgs']] = None,
azure_attributes: Optional[pulumi.Input['ClusterAzureAttributesArgs']] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_log_conf: Optional[pulumi.Input['ClusterClusterLogConfArgs']] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
custom_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
data_security_mode: Optional[pulumi.Input[str]] = None,
docker_image: Optional[pulumi.Input['ClusterDockerImageArgs']] = None,
driver_instance_pool_id: Optional[pulumi.Input[str]] = None,
driver_node_type_id: Optional[pulumi.Input[str]] = None,
enable_elastic_disk: Optional[pulumi.Input[bool]] = None,
enable_local_disk_encryption: Optional[pulumi.Input[bool]] = None,
gcp_attributes: Optional[pulumi.Input['ClusterGcpAttributesArgs']] = None,
idempotency_token: Optional[pulumi.Input[str]] = None,
init_scripts: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
is_pinned: Optional[pulumi.Input[bool]] = None,
libraries: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]] = None,
node_type_id: Optional[pulumi.Input[str]] = None,
num_workers: Optional[pulumi.Input[int]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
single_user_name: Optional[pulumi.Input[str]] = None,
spark_conf: Optional[pulumi.Input[Mapping[str, Any]]] = None,
spark_env_vars: Optional[pulumi.Input[Mapping[str, Any]]] = None,
ssh_public_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Cluster resource.
"""
pulumi.set(__self__, "spark_version", spark_version)
if autoscale is not None:
pulumi.set(__self__, "autoscale", autoscale)
if autotermination_minutes is not None:
pulumi.set(__self__, "autotermination_minutes", autotermination_minutes)
if aws_attributes is not None:
pulumi.set(__self__, "aws_attributes", aws_attributes)
if azure_attributes is not None:
pulumi.set(__self__, "azure_attributes", azure_attributes)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if cluster_log_conf is not None:
pulumi.set(__self__, "cluster_log_conf", cluster_log_conf)
if cluster_name is not None:
pulumi.set(__self__, "cluster_name", cluster_name)
if custom_tags is not None:
pulumi.set(__self__, "custom_tags", custom_tags)
if data_security_mode is not None:
pulumi.set(__self__, "data_security_mode", data_security_mode)
if docker_image is not None:
pulumi.set(__self__, "docker_image", docker_image)
if driver_instance_pool_id is not None:
pulumi.set(__self__, "driver_instance_pool_id", driver_instance_pool_id)
if driver_node_type_id is not None:
pulumi.set(__self__, "driver_node_type_id", driver_node_type_id)
if enable_elastic_disk is not None:
pulumi.set(__self__, "enable_elastic_disk", enable_elastic_disk)
if enable_local_disk_encryption is not None:
pulumi.set(__self__, "enable_local_disk_encryption", enable_local_disk_encryption)
if gcp_attributes is not None:
pulumi.set(__self__, "gcp_attributes", gcp_attributes)
if idempotency_token is not None:
pulumi.set(__self__, "idempotency_token", idempotency_token)
if init_scripts is not None:
pulumi.set(__self__, "init_scripts", init_scripts)
if instance_pool_id is not None:
pulumi.set(__self__, "instance_pool_id", instance_pool_id)
if is_pinned is not None:
pulumi.set(__self__, "is_pinned", is_pinned)
if libraries is not None:
pulumi.set(__self__, "libraries", libraries)
if node_type_id is not None:
pulumi.set(__self__, "node_type_id", node_type_id)
if num_workers is not None:
pulumi.set(__self__, "num_workers", num_workers)
if policy_id is not None:
pulumi.set(__self__, "policy_id", policy_id)
if single_user_name is not None:
pulumi.set(__self__, "single_user_name", single_user_name)
if spark_conf is not None:
pulumi.set(__self__, "spark_conf", spark_conf)
if spark_env_vars is not None:
pulumi.set(__self__, "spark_env_vars", spark_env_vars)
if ssh_public_keys is not None:
pulumi.set(__self__, "ssh_public_keys", ssh_public_keys)
@property
@pulumi.getter(name="sparkVersion")
def spark_version(self) -> pulumi.Input[str]:
return pulumi.get(self, "spark_version")
@spark_version.setter
def spark_version(self, value: pulumi.Input[str]):
pulumi.set(self, "spark_version", value)
@property
@pulumi.getter
def autoscale(self) -> Optional[pulumi.Input['ClusterAutoscaleArgs']]:
return pulumi.get(self, "autoscale")
@autoscale.setter
def autoscale(self, value: Optional[pulumi.Input['ClusterAutoscaleArgs']]):
pulumi.set(self, "autoscale", value)
@property
@pulumi.getter(name="autoterminationMinutes")
def autotermination_minutes(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "autotermination_minutes")
@autotermination_minutes.setter
def autotermination_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "autotermination_minutes", value)
@property
@pulumi.getter(name="awsAttributes")
def aws_attributes(self) -> Optional[pulumi.Input['ClusterAwsAttributesArgs']]:
return pulumi.get(self, "aws_attributes")
@aws_attributes.setter
def aws_attributes(self, value: Optional[pulumi.Input['ClusterAwsAttributesArgs']]):
pulumi.set(self, "aws_attributes", value)
@property
@pulumi.getter(name="azureAttributes")
def azure_attributes(self) -> Optional[pulumi.Input['ClusterAzureAttributesArgs']]:
return pulumi.get(self, "azure_attributes")
@azure_attributes.setter
def azure_attributes(self, value: Optional[pulumi.Input['ClusterAzureAttributesArgs']]):
pulumi.set(self, "azure_attributes", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="clusterLogConf")
def cluster_log_conf(self) -> Optional[pulumi.Input['ClusterClusterLogConfArgs']]:
return pulumi.get(self, "cluster_log_conf")
@cluster_log_conf.setter
def cluster_log_conf(self, value: Optional[pulumi.Input['ClusterClusterLogConfArgs']]):
pulumi.set(self, "cluster_log_conf", value)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_name", value)
    # NOTE(review): accessor section of an input-args class whose `class`
    # header lies above this chunk (presumably ClusterArgs — confirm).
    # Every pair below follows the same generated pattern:
    #   @pulumi.getter(name="camelCaseWireName") maps the snake_case Python
    #   attribute onto its wire/schema key (omitted when the two coincide),
    #   and both getter and setter simply delegate to pulumi.get / pulumi.set
    #   on the instance.
    @property
    @pulumi.getter(name="customTags")
    def custom_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "custom_tags")
    @custom_tags.setter
    def custom_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "custom_tags", value)
    @property
    @pulumi.getter(name="dataSecurityMode")
    def data_security_mode(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "data_security_mode")
    @data_security_mode.setter
    def data_security_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_security_mode", value)
    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input['ClusterDockerImageArgs']]:
        return pulumi.get(self, "docker_image")
    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input['ClusterDockerImageArgs']]):
        pulumi.set(self, "docker_image", value)
    @property
    @pulumi.getter(name="driverInstancePoolId")
    def driver_instance_pool_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "driver_instance_pool_id")
    @driver_instance_pool_id.setter
    def driver_instance_pool_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "driver_instance_pool_id", value)
    @property
    @pulumi.getter(name="driverNodeTypeId")
    def driver_node_type_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "driver_node_type_id")
    @driver_node_type_id.setter
    def driver_node_type_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "driver_node_type_id", value)
    @property
    @pulumi.getter(name="enableElasticDisk")
    def enable_elastic_disk(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_elastic_disk")
    @enable_elastic_disk.setter
    def enable_elastic_disk(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_elastic_disk", value)
    @property
    @pulumi.getter(name="enableLocalDiskEncryption")
    def enable_local_disk_encryption(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_local_disk_encryption")
    @enable_local_disk_encryption.setter
    def enable_local_disk_encryption(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_local_disk_encryption", value)
    @property
    @pulumi.getter(name="gcpAttributes")
    def gcp_attributes(self) -> Optional[pulumi.Input['ClusterGcpAttributesArgs']]:
        return pulumi.get(self, "gcp_attributes")
    @gcp_attributes.setter
    def gcp_attributes(self, value: Optional[pulumi.Input['ClusterGcpAttributesArgs']]):
        pulumi.set(self, "gcp_attributes", value)
    @property
    @pulumi.getter(name="idempotencyToken")
    def idempotency_token(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "idempotency_token")
    @idempotency_token.setter
    def idempotency_token(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "idempotency_token", value)
    @property
    @pulumi.getter(name="initScripts")
    def init_scripts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]]:
        return pulumi.get(self, "init_scripts")
    @init_scripts.setter
    def init_scripts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]]):
        pulumi.set(self, "init_scripts", value)
    @property
    @pulumi.getter(name="instancePoolId")
    def instance_pool_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "instance_pool_id")
    @instance_pool_id.setter
    def instance_pool_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_pool_id", value)
    @property
    @pulumi.getter(name="isPinned")
    def is_pinned(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "is_pinned")
    @is_pinned.setter
    def is_pinned(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_pinned", value)
    # `libraries` needs no name= mapping: the Python and wire names coincide.
    @property
    @pulumi.getter
    def libraries(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]]:
        return pulumi.get(self, "libraries")
    @libraries.setter
    def libraries(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]]):
        pulumi.set(self, "libraries", value)
    @property
    @pulumi.getter(name="nodeTypeId")
    def node_type_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "node_type_id")
    @node_type_id.setter
    def node_type_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_id", value)
    @property
    @pulumi.getter(name="numWorkers")
    def num_workers(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "num_workers")
    @num_workers.setter
    def num_workers(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_workers", value)
    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "policy_id")
    @policy_id.setter
    def policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_id", value)
    @property
    @pulumi.getter(name="singleUserName")
    def single_user_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "single_user_name")
    @single_user_name.setter
    def single_user_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "single_user_name", value)
    @property
    @pulumi.getter(name="sparkConf")
    def spark_conf(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_conf")
    @spark_conf.setter
    def spark_conf(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "spark_conf", value)
    @property
    @pulumi.getter(name="sparkEnvVars")
    def spark_env_vars(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_env_vars")
    @spark_env_vars.setter
    def spark_env_vars(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "spark_env_vars", value)
    @property
    @pulumi.getter(name="sshPublicKeys")
    def ssh_public_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "ssh_public_keys")
    @ssh_public_keys.setter
    def ssh_public_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "ssh_public_keys", value)
@pulumi.input_type
class _ClusterState:
    """State container for an existing ``Cluster`` resource.

    Every field is optional so an instance can carry a partial snapshot of
    the resource's state; ``Cluster.get`` populates one of these when looking
    up an existing cluster. Unlike the create-time args, this class also
    carries the provider-computed fields ``default_tags``, ``state`` and
    ``url``.
    """
    def __init__(__self__, *,
                 autoscale: Optional[pulumi.Input['ClusterAutoscaleArgs']] = None,
                 autotermination_minutes: Optional[pulumi.Input[int]] = None,
                 aws_attributes: Optional[pulumi.Input['ClusterAwsAttributesArgs']] = None,
                 azure_attributes: Optional[pulumi.Input['ClusterAzureAttributesArgs']] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 cluster_log_conf: Optional[pulumi.Input['ClusterClusterLogConfArgs']] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 custom_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 data_security_mode: Optional[pulumi.Input[str]] = None,
                 default_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 docker_image: Optional[pulumi.Input['ClusterDockerImageArgs']] = None,
                 driver_instance_pool_id: Optional[pulumi.Input[str]] = None,
                 driver_node_type_id: Optional[pulumi.Input[str]] = None,
                 enable_elastic_disk: Optional[pulumi.Input[bool]] = None,
                 enable_local_disk_encryption: Optional[pulumi.Input[bool]] = None,
                 gcp_attributes: Optional[pulumi.Input['ClusterGcpAttributesArgs']] = None,
                 idempotency_token: Optional[pulumi.Input[str]] = None,
                 init_scripts: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]] = None,
                 instance_pool_id: Optional[pulumi.Input[str]] = None,
                 is_pinned: Optional[pulumi.Input[bool]] = None,
                 libraries: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]] = None,
                 node_type_id: Optional[pulumi.Input[str]] = None,
                 num_workers: Optional[pulumi.Input[int]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None,
                 single_user_name: Optional[pulumi.Input[str]] = None,
                 spark_conf: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 spark_env_vars: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 spark_version: Optional[pulumi.Input[str]] = None,
                 ssh_public_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Cluster resources.
        """
        # Record only the values that were explicitly supplied; fields left
        # as None stay unset on the instance (pulumi distinguishes "unset"
        # from "set to None").
        if autoscale is not None:
            pulumi.set(__self__, "autoscale", autoscale)
        if autotermination_minutes is not None:
            pulumi.set(__self__, "autotermination_minutes", autotermination_minutes)
        if aws_attributes is not None:
            pulumi.set(__self__, "aws_attributes", aws_attributes)
        if azure_attributes is not None:
            pulumi.set(__self__, "azure_attributes", azure_attributes)
        if cluster_id is not None:
            pulumi.set(__self__, "cluster_id", cluster_id)
        if cluster_log_conf is not None:
            pulumi.set(__self__, "cluster_log_conf", cluster_log_conf)
        if cluster_name is not None:
            pulumi.set(__self__, "cluster_name", cluster_name)
        if custom_tags is not None:
            pulumi.set(__self__, "custom_tags", custom_tags)
        if data_security_mode is not None:
            pulumi.set(__self__, "data_security_mode", data_security_mode)
        if default_tags is not None:
            pulumi.set(__self__, "default_tags", default_tags)
        if docker_image is not None:
            pulumi.set(__self__, "docker_image", docker_image)
        if driver_instance_pool_id is not None:
            pulumi.set(__self__, "driver_instance_pool_id", driver_instance_pool_id)
        if driver_node_type_id is not None:
            pulumi.set(__self__, "driver_node_type_id", driver_node_type_id)
        if enable_elastic_disk is not None:
            pulumi.set(__self__, "enable_elastic_disk", enable_elastic_disk)
        if enable_local_disk_encryption is not None:
            pulumi.set(__self__, "enable_local_disk_encryption", enable_local_disk_encryption)
        if gcp_attributes is not None:
            pulumi.set(__self__, "gcp_attributes", gcp_attributes)
        if idempotency_token is not None:
            pulumi.set(__self__, "idempotency_token", idempotency_token)
        if init_scripts is not None:
            pulumi.set(__self__, "init_scripts", init_scripts)
        if instance_pool_id is not None:
            pulumi.set(__self__, "instance_pool_id", instance_pool_id)
        if is_pinned is not None:
            pulumi.set(__self__, "is_pinned", is_pinned)
        if libraries is not None:
            pulumi.set(__self__, "libraries", libraries)
        if node_type_id is not None:
            pulumi.set(__self__, "node_type_id", node_type_id)
        if num_workers is not None:
            pulumi.set(__self__, "num_workers", num_workers)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if single_user_name is not None:
            pulumi.set(__self__, "single_user_name", single_user_name)
        if spark_conf is not None:
            pulumi.set(__self__, "spark_conf", spark_conf)
        if spark_env_vars is not None:
            pulumi.set(__self__, "spark_env_vars", spark_env_vars)
        if spark_version is not None:
            pulumi.set(__self__, "spark_version", spark_version)
        if ssh_public_keys is not None:
            pulumi.set(__self__, "ssh_public_keys", ssh_public_keys)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if url is not None:
            pulumi.set(__self__, "url", url)
    # Generated accessor pattern for every property below:
    # @pulumi.getter(name=...) maps the snake_case attribute to its camelCase
    # wire key (omitted when identical), and getter/setter delegate to
    # pulumi.get / pulumi.set.
    @property
    @pulumi.getter
    def autoscale(self) -> Optional[pulumi.Input['ClusterAutoscaleArgs']]:
        return pulumi.get(self, "autoscale")
    @autoscale.setter
    def autoscale(self, value: Optional[pulumi.Input['ClusterAutoscaleArgs']]):
        pulumi.set(self, "autoscale", value)
    @property
    @pulumi.getter(name="autoterminationMinutes")
    def autotermination_minutes(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "autotermination_minutes")
    @autotermination_minutes.setter
    def autotermination_minutes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "autotermination_minutes", value)
    @property
    @pulumi.getter(name="awsAttributes")
    def aws_attributes(self) -> Optional[pulumi.Input['ClusterAwsAttributesArgs']]:
        return pulumi.get(self, "aws_attributes")
    @aws_attributes.setter
    def aws_attributes(self, value: Optional[pulumi.Input['ClusterAwsAttributesArgs']]):
        pulumi.set(self, "aws_attributes", value)
    @property
    @pulumi.getter(name="azureAttributes")
    def azure_attributes(self) -> Optional[pulumi.Input['ClusterAzureAttributesArgs']]:
        return pulumi.get(self, "azure_attributes")
    @azure_attributes.setter
    def azure_attributes(self, value: Optional[pulumi.Input['ClusterAzureAttributesArgs']]):
        pulumi.set(self, "azure_attributes", value)
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cluster_id")
    @cluster_id.setter
    def cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_id", value)
    @property
    @pulumi.getter(name="clusterLogConf")
    def cluster_log_conf(self) -> Optional[pulumi.Input['ClusterClusterLogConfArgs']]:
        return pulumi.get(self, "cluster_log_conf")
    @cluster_log_conf.setter
    def cluster_log_conf(self, value: Optional[pulumi.Input['ClusterClusterLogConfArgs']]):
        pulumi.set(self, "cluster_log_conf", value)
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "cluster_name")
    @cluster_name.setter
    def cluster_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cluster_name", value)
    @property
    @pulumi.getter(name="customTags")
    def custom_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "custom_tags")
    @custom_tags.setter
    def custom_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "custom_tags", value)
    @property
    @pulumi.getter(name="dataSecurityMode")
    def data_security_mode(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "data_security_mode")
    @data_security_mode.setter
    def data_security_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_security_mode", value)
    @property
    @pulumi.getter(name="defaultTags")
    def default_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "default_tags")
    @default_tags.setter
    def default_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "default_tags", value)
    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> Optional[pulumi.Input['ClusterDockerImageArgs']]:
        return pulumi.get(self, "docker_image")
    @docker_image.setter
    def docker_image(self, value: Optional[pulumi.Input['ClusterDockerImageArgs']]):
        pulumi.set(self, "docker_image", value)
    @property
    @pulumi.getter(name="driverInstancePoolId")
    def driver_instance_pool_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "driver_instance_pool_id")
    @driver_instance_pool_id.setter
    def driver_instance_pool_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "driver_instance_pool_id", value)
    @property
    @pulumi.getter(name="driverNodeTypeId")
    def driver_node_type_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "driver_node_type_id")
    @driver_node_type_id.setter
    def driver_node_type_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "driver_node_type_id", value)
    @property
    @pulumi.getter(name="enableElasticDisk")
    def enable_elastic_disk(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_elastic_disk")
    @enable_elastic_disk.setter
    def enable_elastic_disk(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_elastic_disk", value)
    @property
    @pulumi.getter(name="enableLocalDiskEncryption")
    def enable_local_disk_encryption(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "enable_local_disk_encryption")
    @enable_local_disk_encryption.setter
    def enable_local_disk_encryption(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_local_disk_encryption", value)
    @property
    @pulumi.getter(name="gcpAttributes")
    def gcp_attributes(self) -> Optional[pulumi.Input['ClusterGcpAttributesArgs']]:
        return pulumi.get(self, "gcp_attributes")
    @gcp_attributes.setter
    def gcp_attributes(self, value: Optional[pulumi.Input['ClusterGcpAttributesArgs']]):
        pulumi.set(self, "gcp_attributes", value)
    @property
    @pulumi.getter(name="idempotencyToken")
    def idempotency_token(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "idempotency_token")
    @idempotency_token.setter
    def idempotency_token(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "idempotency_token", value)
    @property
    @pulumi.getter(name="initScripts")
    def init_scripts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]]:
        return pulumi.get(self, "init_scripts")
    @init_scripts.setter
    def init_scripts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterInitScriptArgs']]]]):
        pulumi.set(self, "init_scripts", value)
    @property
    @pulumi.getter(name="instancePoolId")
    def instance_pool_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "instance_pool_id")
    @instance_pool_id.setter
    def instance_pool_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_pool_id", value)
    @property
    @pulumi.getter(name="isPinned")
    def is_pinned(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "is_pinned")
    @is_pinned.setter
    def is_pinned(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_pinned", value)
    @property
    @pulumi.getter
    def libraries(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]]:
        return pulumi.get(self, "libraries")
    @libraries.setter
    def libraries(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterLibraryArgs']]]]):
        pulumi.set(self, "libraries", value)
    @property
    @pulumi.getter(name="nodeTypeId")
    def node_type_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "node_type_id")
    @node_type_id.setter
    def node_type_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type_id", value)
    @property
    @pulumi.getter(name="numWorkers")
    def num_workers(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "num_workers")
    @num_workers.setter
    def num_workers(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_workers", value)
    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "policy_id")
    @policy_id.setter
    def policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_id", value)
    @property
    @pulumi.getter(name="singleUserName")
    def single_user_name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "single_user_name")
    @single_user_name.setter
    def single_user_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "single_user_name", value)
    @property
    @pulumi.getter(name="sparkConf")
    def spark_conf(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_conf")
    @spark_conf.setter
    def spark_conf(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "spark_conf", value)
    @property
    @pulumi.getter(name="sparkEnvVars")
    def spark_env_vars(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_env_vars")
    @spark_env_vars.setter
    def spark_env_vars(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "spark_env_vars", value)
    @property
    @pulumi.getter(name="sparkVersion")
    def spark_version(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "spark_version")
    @spark_version.setter
    def spark_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "spark_version", value)
    @property
    @pulumi.getter(name="sshPublicKeys")
    def ssh_public_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "ssh_public_keys")
    @ssh_public_keys.setter
    def ssh_public_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "ssh_public_keys", value)
    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "state")
    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)
    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "url")
    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
class Cluster(pulumi.CustomResource):
    """Pulumi resource wrapping the ``databricks:databricks/cluster:Cluster``
    type (see the type token passed to ``super().__init__`` below).

    The two ``@overload`` signatures document the supported calling
    conventions (keyword args vs. a single ``ClusterArgs``); the real
    ``__init__`` dispatches between them via
    ``_utilities.get_resource_args_opts``.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 autoscale: Optional[pulumi.Input[pulumi.InputType['ClusterAutoscaleArgs']]] = None,
                 autotermination_minutes: Optional[pulumi.Input[int]] = None,
                 aws_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAwsAttributesArgs']]] = None,
                 azure_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAzureAttributesArgs']]] = None,
                 cluster_id: Optional[pulumi.Input[str]] = None,
                 cluster_log_conf: Optional[pulumi.Input[pulumi.InputType['ClusterClusterLogConfArgs']]] = None,
                 cluster_name: Optional[pulumi.Input[str]] = None,
                 custom_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 data_security_mode: Optional[pulumi.Input[str]] = None,
                 docker_image: Optional[pulumi.Input[pulumi.InputType['ClusterDockerImageArgs']]] = None,
                 driver_instance_pool_id: Optional[pulumi.Input[str]] = None,
                 driver_node_type_id: Optional[pulumi.Input[str]] = None,
                 enable_elastic_disk: Optional[pulumi.Input[bool]] = None,
                 enable_local_disk_encryption: Optional[pulumi.Input[bool]] = None,
                 gcp_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterGcpAttributesArgs']]] = None,
                 idempotency_token: Optional[pulumi.Input[str]] = None,
                 init_scripts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterInitScriptArgs']]]]] = None,
                 instance_pool_id: Optional[pulumi.Input[str]] = None,
                 is_pinned: Optional[pulumi.Input[bool]] = None,
                 libraries: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterLibraryArgs']]]]] = None,
                 node_type_id: Optional[pulumi.Input[str]] = None,
                 num_workers: Optional[pulumi.Input[int]] = None,
                 policy_id: Optional[pulumi.Input[str]] = None,
                 single_user_name: Optional[pulumi.Input[str]] = None,
                 spark_conf: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 spark_env_vars: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 spark_version: Optional[pulumi.Input[str]] = None,
                 ssh_public_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Create a Cluster resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ClusterArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a Cluster resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param ClusterArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overload shapes: if the caller passed a
        # ClusterArgs object, expand its attributes into keyword args;
        # otherwise forward positional/keyword args unchanged.
        resource_args, opts = _utilities.get_resource_args_opts(ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       autoscale: Optional[pulumi.Input[pulumi.InputType['ClusterAutoscaleArgs']]] = None,
                       autotermination_minutes: Optional[pulumi.Input[int]] = None,
                       aws_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAwsAttributesArgs']]] = None,
                       azure_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAzureAttributesArgs']]] = None,
                       cluster_id: Optional[pulumi.Input[str]] = None,
                       cluster_log_conf: Optional[pulumi.Input[pulumi.InputType['ClusterClusterLogConfArgs']]] = None,
                       cluster_name: Optional[pulumi.Input[str]] = None,
                       custom_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       data_security_mode: Optional[pulumi.Input[str]] = None,
                       docker_image: Optional[pulumi.Input[pulumi.InputType['ClusterDockerImageArgs']]] = None,
                       driver_instance_pool_id: Optional[pulumi.Input[str]] = None,
                       driver_node_type_id: Optional[pulumi.Input[str]] = None,
                       enable_elastic_disk: Optional[pulumi.Input[bool]] = None,
                       enable_local_disk_encryption: Optional[pulumi.Input[bool]] = None,
                       gcp_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterGcpAttributesArgs']]] = None,
                       idempotency_token: Optional[pulumi.Input[str]] = None,
                       init_scripts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterInitScriptArgs']]]]] = None,
                       instance_pool_id: Optional[pulumi.Input[str]] = None,
                       is_pinned: Optional[pulumi.Input[bool]] = None,
                       libraries: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterLibraryArgs']]]]] = None,
                       node_type_id: Optional[pulumi.Input[str]] = None,
                       num_workers: Optional[pulumi.Input[int]] = None,
                       policy_id: Optional[pulumi.Input[str]] = None,
                       single_user_name: Optional[pulumi.Input[str]] = None,
                       spark_conf: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       spark_env_vars: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       spark_version: Optional[pulumi.Input[str]] = None,
                       ssh_public_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       __props__=None):
        # Normalize resource options and default the provider version.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # A pre-built __props__ is only legal when reconstructing an existing
        # resource (opts.id set, via get()); for a fresh create it is built
        # here from the keyword arguments.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ClusterArgs.__new__(ClusterArgs)
            __props__.__dict__["autoscale"] = autoscale
            __props__.__dict__["autotermination_minutes"] = autotermination_minutes
            __props__.__dict__["aws_attributes"] = aws_attributes
            __props__.__dict__["azure_attributes"] = azure_attributes
            __props__.__dict__["cluster_id"] = cluster_id
            __props__.__dict__["cluster_log_conf"] = cluster_log_conf
            __props__.__dict__["cluster_name"] = cluster_name
            __props__.__dict__["custom_tags"] = custom_tags
            __props__.__dict__["data_security_mode"] = data_security_mode
            __props__.__dict__["docker_image"] = docker_image
            __props__.__dict__["driver_instance_pool_id"] = driver_instance_pool_id
            __props__.__dict__["driver_node_type_id"] = driver_node_type_id
            __props__.__dict__["enable_elastic_disk"] = enable_elastic_disk
            __props__.__dict__["enable_local_disk_encryption"] = enable_local_disk_encryption
            __props__.__dict__["gcp_attributes"] = gcp_attributes
            __props__.__dict__["idempotency_token"] = idempotency_token
            __props__.__dict__["init_scripts"] = init_scripts
            __props__.__dict__["instance_pool_id"] = instance_pool_id
            __props__.__dict__["is_pinned"] = is_pinned
            __props__.__dict__["libraries"] = libraries
            __props__.__dict__["node_type_id"] = node_type_id
            __props__.__dict__["num_workers"] = num_workers
            __props__.__dict__["policy_id"] = policy_id
            __props__.__dict__["single_user_name"] = single_user_name
            __props__.__dict__["spark_conf"] = spark_conf
            __props__.__dict__["spark_env_vars"] = spark_env_vars
            # spark_version is the only required input; the check is skipped
            # when rehydrating from an existing URN.
            if spark_version is None and not opts.urn:
                raise TypeError("Missing required property 'spark_version'")
            __props__.__dict__["spark_version"] = spark_version
            __props__.__dict__["ssh_public_keys"] = ssh_public_keys
            # Output-only properties: initialized to None, populated by the
            # provider after creation.
            __props__.__dict__["default_tags"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["url"] = None
        super(Cluster, __self__).__init__(
            'databricks:databricks/cluster:Cluster',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            autoscale: Optional[pulumi.Input[pulumi.InputType['ClusterAutoscaleArgs']]] = None,
            autotermination_minutes: Optional[pulumi.Input[int]] = None,
            aws_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAwsAttributesArgs']]] = None,
            azure_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterAzureAttributesArgs']]] = None,
            cluster_id: Optional[pulumi.Input[str]] = None,
            cluster_log_conf: Optional[pulumi.Input[pulumi.InputType['ClusterClusterLogConfArgs']]] = None,
            cluster_name: Optional[pulumi.Input[str]] = None,
            custom_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            data_security_mode: Optional[pulumi.Input[str]] = None,
            default_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            docker_image: Optional[pulumi.Input[pulumi.InputType['ClusterDockerImageArgs']]] = None,
            driver_instance_pool_id: Optional[pulumi.Input[str]] = None,
            driver_node_type_id: Optional[pulumi.Input[str]] = None,
            enable_elastic_disk: Optional[pulumi.Input[bool]] = None,
            enable_local_disk_encryption: Optional[pulumi.Input[bool]] = None,
            gcp_attributes: Optional[pulumi.Input[pulumi.InputType['ClusterGcpAttributesArgs']]] = None,
            idempotency_token: Optional[pulumi.Input[str]] = None,
            init_scripts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterInitScriptArgs']]]]] = None,
            instance_pool_id: Optional[pulumi.Input[str]] = None,
            is_pinned: Optional[pulumi.Input[bool]] = None,
            libraries: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterLibraryArgs']]]]] = None,
            node_type_id: Optional[pulumi.Input[str]] = None,
            num_workers: Optional[pulumi.Input[int]] = None,
            policy_id: Optional[pulumi.Input[str]] = None,
            single_user_name: Optional[pulumi.Input[str]] = None,
            spark_conf: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            spark_env_vars: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            spark_version: Optional[pulumi.Input[str]] = None,
            ssh_public_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            state: Optional[pulumi.Input[str]] = None,
            url: Optional[pulumi.Input[str]] = None) -> 'Cluster':
        """
        Get an existing Cluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Merging id into opts signals to the engine that this is a lookup
        # of an existing resource rather than a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ClusterState.__new__(_ClusterState)
        __props__.__dict__["autoscale"] = autoscale
        __props__.__dict__["autotermination_minutes"] = autotermination_minutes
        __props__.__dict__["aws_attributes"] = aws_attributes
        __props__.__dict__["azure_attributes"] = azure_attributes
        __props__.__dict__["cluster_id"] = cluster_id
        __props__.__dict__["cluster_log_conf"] = cluster_log_conf
        __props__.__dict__["cluster_name"] = cluster_name
        __props__.__dict__["custom_tags"] = custom_tags
        __props__.__dict__["data_security_mode"] = data_security_mode
        __props__.__dict__["default_tags"] = default_tags
        __props__.__dict__["docker_image"] = docker_image
        __props__.__dict__["driver_instance_pool_id"] = driver_instance_pool_id
        __props__.__dict__["driver_node_type_id"] = driver_node_type_id
        __props__.__dict__["enable_elastic_disk"] = enable_elastic_disk
        __props__.__dict__["enable_local_disk_encryption"] = enable_local_disk_encryption
        __props__.__dict__["gcp_attributes"] = gcp_attributes
        __props__.__dict__["idempotency_token"] = idempotency_token
        __props__.__dict__["init_scripts"] = init_scripts
        __props__.__dict__["instance_pool_id"] = instance_pool_id
        __props__.__dict__["is_pinned"] = is_pinned
        __props__.__dict__["libraries"] = libraries
        __props__.__dict__["node_type_id"] = node_type_id
        __props__.__dict__["num_workers"] = num_workers
        __props__.__dict__["policy_id"] = policy_id
        __props__.__dict__["single_user_name"] = single_user_name
        __props__.__dict__["spark_conf"] = spark_conf
        __props__.__dict__["spark_env_vars"] = spark_env_vars
        __props__.__dict__["spark_version"] = spark_version
        __props__.__dict__["ssh_public_keys"] = ssh_public_keys
        __props__.__dict__["state"] = state
        __props__.__dict__["url"] = url
        return Cluster(resource_name, opts=opts, __props__=__props__)
    # Read-only output accessors. Unlike the input classes these have no
    # setters and return pulumi.Output values resolved by the engine; the
    # @pulumi.getter(name=...) decorator maps each snake_case attribute to
    # its camelCase wire key.
    @property
    @pulumi.getter
    def autoscale(self) -> pulumi.Output[Optional['outputs.ClusterAutoscale']]:
        return pulumi.get(self, "autoscale")
    @property
    @pulumi.getter(name="autoterminationMinutes")
    def autotermination_minutes(self) -> pulumi.Output[Optional[int]]:
        return pulumi.get(self, "autotermination_minutes")
    @property
    @pulumi.getter(name="awsAttributes")
    def aws_attributes(self) -> pulumi.Output[Optional['outputs.ClusterAwsAttributes']]:
        return pulumi.get(self, "aws_attributes")
    @property
    @pulumi.getter(name="azureAttributes")
    def azure_attributes(self) -> pulumi.Output[Optional['outputs.ClusterAzureAttributes']]:
        return pulumi.get(self, "azure_attributes")
    @property
    @pulumi.getter(name="clusterId")
    def cluster_id(self) -> pulumi.Output[str]:
        return pulumi.get(self, "cluster_id")
    @property
    @pulumi.getter(name="clusterLogConf")
    def cluster_log_conf(self) -> pulumi.Output[Optional['outputs.ClusterClusterLogConf']]:
        return pulumi.get(self, "cluster_log_conf")
    @property
    @pulumi.getter(name="clusterName")
    def cluster_name(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "cluster_name")
    @property
    @pulumi.getter(name="customTags")
    def custom_tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        return pulumi.get(self, "custom_tags")
    @property
    @pulumi.getter(name="dataSecurityMode")
    def data_security_mode(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "data_security_mode")
    @property
    @pulumi.getter(name="defaultTags")
    def default_tags(self) -> pulumi.Output[Mapping[str, Any]]:
        return pulumi.get(self, "default_tags")
    @property
    @pulumi.getter(name="dockerImage")
    def docker_image(self) -> pulumi.Output[Optional['outputs.ClusterDockerImage']]:
        return pulumi.get(self, "docker_image")
    @property
    @pulumi.getter(name="driverInstancePoolId")
    def driver_instance_pool_id(self) -> pulumi.Output[str]:
        return pulumi.get(self, "driver_instance_pool_id")
    @property
    @pulumi.getter(name="driverNodeTypeId")
    def driver_node_type_id(self) -> pulumi.Output[str]:
        return pulumi.get(self, "driver_node_type_id")
    @property
    @pulumi.getter(name="enableElasticDisk")
    def enable_elastic_disk(self) -> pulumi.Output[bool]:
        return pulumi.get(self, "enable_elastic_disk")
    @property
    @pulumi.getter(name="enableLocalDiskEncryption")
    def enable_local_disk_encryption(self) -> pulumi.Output[bool]:
        return pulumi.get(self, "enable_local_disk_encryption")
    @property
    @pulumi.getter(name="gcpAttributes")
    def gcp_attributes(self) -> pulumi.Output[Optional['outputs.ClusterGcpAttributes']]:
        return pulumi.get(self, "gcp_attributes")
    @property
    @pulumi.getter(name="idempotencyToken")
    def idempotency_token(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "idempotency_token")
    @property
    @pulumi.getter(name="initScripts")
    def init_scripts(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterInitScript']]]:
        return pulumi.get(self, "init_scripts")
    @property
    @pulumi.getter(name="instancePoolId")
    def instance_pool_id(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "instance_pool_id")
    @property
    @pulumi.getter(name="isPinned")
    def is_pinned(self) -> pulumi.Output[Optional[bool]]:
        return pulumi.get(self, "is_pinned")
    @property
    @pulumi.getter
    def libraries(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterLibrary']]]:
        return pulumi.get(self, "libraries")
    @property
    @pulumi.getter(name="nodeTypeId")
    def node_type_id(self) -> pulumi.Output[str]:
        return pulumi.get(self, "node_type_id")
    @property
    @pulumi.getter(name="numWorkers")
    def num_workers(self) -> pulumi.Output[Optional[int]]:
        return pulumi.get(self, "num_workers")
    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "policy_id")
    @property
    @pulumi.getter(name="singleUserName")
    def single_user_name(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "single_user_name")
    @property
    @pulumi.getter(name="sparkConf")
    def spark_conf(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_conf")
    @property
    @pulumi.getter(name="sparkEnvVars")
    def spark_env_vars(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        return pulumi.get(self, "spark_env_vars")
    @property
    @pulumi.getter(name="sparkVersion")
    def spark_version(self) -> pulumi.Output[str]:
        return pulumi.get(self, "spark_version")
    @property
    @pulumi.getter(name="sshPublicKeys")
    def ssh_public_keys(self) -> pulumi.Output[Optional[Sequence[str]]]:
        return pulumi.get(self, "ssh_public_keys")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        return pulumi.get(self, "state")
    @property
    @pulumi.getter
    def url(self) -> pulumi.Output[str]:
        return pulumi.get(self, "url")
| 45.48474
| 134
| 0.671226
| 5,834
| 50,670
| 5.49143
| 0.036682
| 0.101289
| 0.15479
| 0.071417
| 0.937042
| 0.926991
| 0.909511
| 0.90127
| 0.887973
| 0.843088
| 0
| 0.000025
| 0.210006
| 50,670
| 1,113
| 135
| 45.525606
| 0.80032
| 0.021768
| 0
| 0.884074
| 1
| 0
| 0.140635
| 0.04727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169014
| false
| 0.001083
| 0.007584
| 0.097508
| 0.27844
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d2f466e4dbde7632b2e29a02ceb17a0e59a8dbe8
| 592
|
py
|
Python
|
deepscratch/models/layers/layer.py
|
mari-linhares/deep-python-scratch
|
b447ed20c981db5ffef810b6f80d1638cf7d2ccd
|
[
"MIT"
] | 9
|
2018-09-18T00:29:10.000Z
|
2021-02-20T17:58:30.000Z
|
deepscratch/models/layers/layer.py
|
Jagannathrk2020/deeplearning-from-scratch
|
b447ed20c981db5ffef810b6f80d1638cf7d2ccd
|
[
"MIT"
] | null | null | null |
deepscratch/models/layers/layer.py
|
Jagannathrk2020/deeplearning-from-scratch
|
b447ed20c981db5ffef810b6f80d1638cf7d2ccd
|
[
"MIT"
] | 3
|
2018-09-18T15:47:59.000Z
|
2020-08-09T03:10:34.000Z
|
import numpy as np
class Layer(object):
def __init__(self):
self.input_shape = None
def name(self):
return self.__class__.__name__
def forward(self, data):
return NotImplementedError()
def backward(self, grads, **kwargs):
return NotImplementedError()
def initialize(self, initializer, otimizer, input_shape, **kwargs):
pass
def output_shape(self):
return NotImplementedError()
def params(self):
return NotImplementedError()
def dparams(self):
return NotImplementedError()
| 21.925926
| 71
| 0.633446
| 59
| 592
| 6.101695
| 0.474576
| 0.347222
| 0.311111
| 0.177778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278716
| 592
| 27
| 72
| 21.925926
| 0.843091
| 0
| 0
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.055556
| 0.055556
| 0.333333
| 0.888889
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
96157ff85c00c83af8a8a98d6d6023d788f5a61c
| 247
|
py
|
Python
|
endochrone/classification/__init__.py
|
nickwood/endochrone
|
050a2604be82ee4cd5ee6357ea72d3d6d4117277
|
[
"MIT"
] | 2
|
2020-04-20T15:41:53.000Z
|
2021-11-25T18:52:20.000Z
|
endochrone/classification/__init__.py
|
nickwood/endochrone
|
050a2604be82ee4cd5ee6357ea72d3d6d4117277
|
[
"MIT"
] | null | null | null |
endochrone/classification/__init__.py
|
nickwood/endochrone
|
050a2604be82ee4cd5ee6357ea72d3d6d4117277
|
[
"MIT"
] | null | null | null |
from endochrone.classification.binary_tree import BinaryDecisionTree
from endochrone.classification.naive_knn import KNearest
from endochrone.classification.naive_bayes import NaiveBayes
__all__ = ['BinaryDecisionTree', 'KNearest', 'NaiveBayes']
| 41.166667
| 68
| 0.8583
| 25
| 247
| 8.2
| 0.52
| 0.204878
| 0.409756
| 0.321951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072874
| 247
| 5
| 69
| 49.4
| 0.895197
| 0
| 0
| 0
| 0
| 0
| 0.145749
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8296a9bdc0de4d105cae89654d5fc9dbdd6beabc
| 2,049
|
py
|
Python
|
src/share/art.py
|
davidli218/anonymChat
|
e233cb9a622f586d8de0025a65e04b824b760885
|
[
"MIT"
] | null | null | null |
src/share/art.py
|
davidli218/anonymChat
|
e233cb9a622f586d8de0025a65e04b824b760885
|
[
"MIT"
] | null | null | null |
src/share/art.py
|
davidli218/anonymChat
|
e233cb9a622f586d8de0025a65e04b824b760885
|
[
"MIT"
] | null | null | null |
__all__ = ('ascii_art_title_4client', 'ascii_art_title_4server')
ascii_art_title_4client = r"""
/$$$$$$ /$$ /$$
/$$__ $$| $$ | $$
/$$$$$$ /$$$$$$$ /$$$$$$ /$$$$$$$ /$$ /$$ /$$$$$$/$$$$ | $$ \__/| $$$$$$$ /$$$$$$ /$$$$$$
|____ $$| $$__ $$ /$$__ $$| $$__ $$| $$ | $$| $$_ $$_ $$| $$ | $$__ $$ |____ $$|_ $$_/
/$$$$$$$| $$ \ $$| $$ \ $$| $$ \ $$| $$ | $$| $$ \ $$ \ $$| $$ | $$ \ $$ /$$$$$$$ | $$
/$$__ $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$| $$ | $$ | $$| $$ $$| $$ | $$ /$$__ $$ | $$ /$$
| $$$$$$$| $$ | $$| $$$$$$/| $$ | $$| $$$$$$$| $$ | $$ | $$| $$$$$$/| $$ | $$| $$$$$$$ | $$$$/
\_______/|__/ |__/ \______/ |__/ |__/ \____ $$|__/ |__/ |__/ \______/ |__/ |__/ \_______/ \___/
/$$ | $$
| $$$$$$/
\______/
"""
ascii_art_title_4server = r"""
_____ _ _ _____
/ __ \ | | | / ___|
__ _ _ __ ___ _ __ _ _ _ __ ___ | / \/ |__ __ _| |_ \ `--. ___ _ ____ _____ _ __
/ _` | '_ \ / _ \| '_ \| | | | '_ ` _ \| | | '_ \ / _` | __| `--. \/ _ \ '__\ \ / / _ \ '__|
| (_| | | | | (_) | | | | |_| | | | | | | \__/\ | | | (_| | |_ /\__/ / __/ | \ V / __/ |
\__,_|_| |_|\___/|_| |_|\__, |_| |_| |_|\____/_| |_|\__,_|\__| \____/ \___|_| \_/ \___|_|
__/ |
|___/
"""
| 75.888889
| 151
| 0.155686
| 20
| 2,049
| 4.3
| 0.4
| 0.372093
| 0.604651
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004587
| 0.574427
| 2,049
| 26
| 152
| 78.807692
| 0.094037
| 0
| 0
| 0.083333
| 0
| 0.375
| 0.956564
| 0.02245
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
82dadb2753ef614cb4a88488fa266ad22309a460
| 57,061
|
py
|
Python
|
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import lsp_xc_traps
import policy
import rsvp
import ldp
import bfd
import dynamic_bypass
import mpls_interface
import autobw_template
import autobw_threshold_table
import cspf_group
import path
import bypass_lsp
import lsp
class mpls_cmds_holder(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /mpls-config/router/mpls/mpls-cmds-holder. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp_xc_traps','__policy','__rsvp','__ldp','__bfd','__dynamic_bypass','__mpls_interface','__autobw_template','__autobw_threshold_table','__cspf_group','__path','__bypass_lsp','__lsp',)
_yang_name = 'mpls-cmds-holder'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__autobw_threshold_table = YANGDynClass(base=autobw_threshold_table.autobw_threshold_table, is_container='container', presence=True, yang_name="autobw-threshold-table", rest_name="autobw-threshold-table", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Threshold Table', u'callpoint': u'MplsAutobwThresholdTable', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-autobw-threshold-table'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__autobw_template = YANGDynClass(base=YANGListType("autobw_template_name",autobw_template.autobw_template, yang_name="autobw-template", rest_name="autobw-template", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='autobw-template-name', extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}), is_container='list', yang_name="autobw-template", rest_name="autobw-template", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=True, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure BFD parameters', u'callpoint': u'MplsBfd', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__bypass_lsp = YANGDynClass(base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__mpls_interface = YANGDynClass(base=YANGListType("interface_type interface_name",mpls_interface.mpls_interface, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-type interface-name', extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}), is_container='list', yang_name="mpls-interface", rest_name="mpls-interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__cspf_group = YANGDynClass(base=YANGListType("cspf_group_name",cspf_group.cspf_group, yang_name="cspf-group", rest_name="cspf-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cspf-group-name', extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}), is_container='list', yang_name="cspf-group", rest_name="cspf-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__ldp = YANGDynClass(base=ldp.ldp, is_container='container', presence=True, yang_name="ldp", rest_name="ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MPLS Protocol (MPLS)', u'callpoint': u'MplsLdp', u'cli-full-command': None, u'cli-add-mode': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-ldp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp = YANGDynClass(base=YANGListType("lsp_name",lsp.lsp, yang_name="lsp", rest_name="lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='lsp-name', extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}), is_container='list', yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__policy = YANGDynClass(base=policy.policy, is_container='container', presence=True, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS Policy Config mode', u'callpoint': u'MplsPolicy', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-policy'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__path = YANGDynClass(base=YANGListType("path_name",path.path, yang_name="path", rest_name="path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='path-name', extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}), is_container='list', yang_name="path", rest_name="path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
self.__dynamic_bypass = YANGDynClass(base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp_xc_traps = YANGDynClass(base=lsp_xc_traps.lsp_xc_traps, is_container='container', presence=False, yang_name="lsp-xc-traps", rest_name="lsp-xc-traps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable the LSP XC up/down logging/traps', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__rsvp = YANGDynClass(base=rsvp.rsvp, is_container='container', presence=True, yang_name="rsvp", rest_name="rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS RSVP Config mode', u'callpoint': u'MplsRsvp', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-rsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-config', u'router', u'mpls', u'mpls-cmds-holder']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'mpls']
def _get_lsp_xc_traps(self):
"""
Getter method for lsp_xc_traps, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp_xc_traps (container)
"""
return self.__lsp_xc_traps
def _set_lsp_xc_traps(self, v, load=False):
"""
Setter method for lsp_xc_traps, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp_xc_traps (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_xc_traps is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_xc_traps() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lsp_xc_traps.lsp_xc_traps, is_container='container', presence=False, yang_name="lsp-xc-traps", rest_name="lsp-xc-traps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable the LSP XC up/down logging/traps', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_xc_traps must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lsp_xc_traps.lsp_xc_traps, is_container='container', presence=False, yang_name="lsp-xc-traps", rest_name="lsp-xc-traps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable the LSP XC up/down logging/traps', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__lsp_xc_traps = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_xc_traps(self):
self.__lsp_xc_traps = YANGDynClass(base=lsp_xc_traps.lsp_xc_traps, is_container='container', presence=False, yang_name="lsp-xc-traps", rest_name="lsp-xc-traps", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable the LSP XC up/down logging/traps', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_policy(self):
"""
Getter method for policy, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/policy (container)
"""
return self.__policy
def _set_policy(self, v, load=False):
"""
Setter method for policy, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/policy (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=policy.policy, is_container='container', presence=True, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS Policy Config mode', u'callpoint': u'MplsPolicy', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-policy'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """policy must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=policy.policy, is_container='container', presence=True, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS Policy Config mode', u'callpoint': u'MplsPolicy', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-policy'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__policy = t
if hasattr(self, '_set'):
self._set()
def _unset_policy(self):
self.__policy = YANGDynClass(base=policy.policy, is_container='container', presence=True, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS Policy Config mode', u'callpoint': u'MplsPolicy', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-policy'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_rsvp(self):
"""
Getter method for rsvp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/rsvp (container)
"""
return self.__rsvp
def _set_rsvp(self, v, load=False):
"""
Setter method for rsvp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/rsvp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_rsvp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rsvp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=rsvp.rsvp, is_container='container', presence=True, yang_name="rsvp", rest_name="rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS RSVP Config mode', u'callpoint': u'MplsRsvp', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-rsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rsvp must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=rsvp.rsvp, is_container='container', presence=True, yang_name="rsvp", rest_name="rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS RSVP Config mode', u'callpoint': u'MplsRsvp', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-rsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__rsvp = t
if hasattr(self, '_set'):
self._set()
def _unset_rsvp(self):
self.__rsvp = YANGDynClass(base=rsvp.rsvp, is_container='container', presence=True, yang_name="rsvp", rest_name="rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enter MPLS RSVP Config mode', u'callpoint': u'MplsRsvp', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-rsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_ldp(self):
"""
Getter method for ldp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/ldp (container)
"""
return self.__ldp
def _set_ldp(self, v, load=False):
"""
Setter method for ldp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/ldp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=ldp.ldp, is_container='container', presence=True, yang_name="ldp", rest_name="ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MPLS Protocol (MPLS)', u'callpoint': u'MplsLdp', u'cli-full-command': None, u'cli-add-mode': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-ldp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ldp must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=ldp.ldp, is_container='container', presence=True, yang_name="ldp", rest_name="ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MPLS Protocol (MPLS)', u'callpoint': u'MplsLdp', u'cli-full-command': None, u'cli-add-mode': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-ldp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__ldp = t
if hasattr(self, '_set'):
self._set()
def _unset_ldp(self):
self.__ldp = YANGDynClass(base=ldp.ldp, is_container='container', presence=True, yang_name="ldp", rest_name="ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MPLS Protocol (MPLS)', u'callpoint': u'MplsLdp', u'cli-full-command': None, u'cli-add-mode': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-ldp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_bfd(self):
    """
    Getter method for bfd, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bfd (container)
    """
    return self.__bfd

def _set_bfd(self, v, load=False):
    """
    Setter method for bfd, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bfd (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_bfd is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_bfd() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in the generated container class; this validates it
        # and attaches path/extension metadata. Incompatible input raises below.
        t = YANGDynClass(v,base=bfd.bfd, is_container='container', presence=True, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure BFD parameters', u'callpoint': u'MplsBfd', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """bfd must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=bfd.bfd, is_container='container', presence=True, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure BFD parameters', u'callpoint': u'MplsBfd', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__bfd = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_bfd(self):
    # Reset bfd to a fresh, empty presence container with its default metadata.
    self.__bfd = YANGDynClass(base=bfd.bfd, is_container='container', presence=True, yang_name="bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure BFD parameters', u'callpoint': u'MplsBfd', u'cli-add-mode': None, u'cli-full-command': None, u'hidden': u'full', u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_dynamic_bypass(self):
    """
    Getter method for dynamic_bypass, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/dynamic_bypass (container)
    """
    return self.__dynamic_bypass

def _set_dynamic_bypass(self, v, load=False):
    """
    Setter method for dynamic_bypass, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/dynamic_bypass (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_dynamic_bypass is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_dynamic_bypass() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in the generated container class; this validates it
        # and attaches path/extension metadata. Incompatible input raises below.
        t = YANGDynClass(v,base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """dynamic_bypass must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__dynamic_bypass = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_dynamic_bypass(self):
    # Reset dynamic_bypass to a fresh, empty presence container with its default metadata.
    self.__dynamic_bypass = YANGDynClass(base=dynamic_bypass.dynamic_bypass, is_container='container', presence=True, yang_name="dynamic-bypass", rest_name="dynamic-bypass", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Dynamic bypass router level parameters', u'callpoint': u'MplsDynamicBypass', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-mpls-dynamic-bypass'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_mpls_interface(self):
    """
    Getter method for mpls_interface, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface (list)
    """
    return self.__mpls_interface

def _set_mpls_interface(self, v, load=False):
    """
    Setter method for mpls_interface, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mpls_interface is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mpls_interface() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (keys: interface-type,
        # interface-name); this validates it and attaches path/extension
        # metadata. Incompatible input raises below.
        t = YANGDynClass(v,base=YANGListType("interface_type interface_name",mpls_interface.mpls_interface, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-type interface-name', extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}), is_container='list', yang_name="mpls-interface", rest_name="mpls-interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """mpls_interface must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("interface_type interface_name",mpls_interface.mpls_interface, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-type interface-name', extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}), is_container='list', yang_name="mpls-interface", rest_name="mpls-interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__mpls_interface = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_mpls_interface(self):
    # Reset mpls_interface to a fresh, empty keyed list with its default metadata.
    self.__mpls_interface = YANGDynClass(base=YANGListType("interface_type interface_name",mpls_interface.mpls_interface, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='interface-type interface-name', extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}), is_container='list', yang_name="mpls-interface", rest_name="mpls-interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define MPLS Interface', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsInterface', u'cli-mode-name': u'config-router-mpls-if-$(interface-type)-$(interface-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
def _get_autobw_template(self):
    """
    Getter method for autobw_template, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/autobw_template (list)
    """
    return self.__autobw_template

def _set_autobw_template(self, v, load=False):
    """
    Setter method for autobw_template, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/autobw_template (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_autobw_template is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_autobw_template() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (key: autobw-template-name);
        # this validates it and attaches path/extension metadata.
        t = YANGDynClass(v,base=YANGListType("autobw_template_name",autobw_template.autobw_template, yang_name="autobw-template", rest_name="autobw-template", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='autobw-template-name', extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}), is_container='list', yang_name="autobw-template", rest_name="autobw-template", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """autobw_template must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("autobw_template_name",autobw_template.autobw_template, yang_name="autobw-template", rest_name="autobw-template", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='autobw-template-name', extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}), is_container='list', yang_name="autobw-template", rest_name="autobw-template", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__autobw_template = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_autobw_template(self):
    # Reset autobw_template to a fresh, empty keyed list with its default metadata.
    self.__autobw_template = YANGDynClass(base=YANGListType("autobw_template_name",autobw_template.autobw_template, yang_name="autobw-template", rest_name="autobw-template", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='autobw-template-name', extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}), is_container='list', yang_name="autobw-template", rest_name="autobw-template", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Template', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsAutobwTemplate', u'cli-mode-name': u'config-router-mpls-autobw-template-$(autobw-template-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
def _get_autobw_threshold_table(self):
    """
    Getter method for autobw_threshold_table, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/autobw_threshold_table (container)
    """
    return self.__autobw_threshold_table

def _set_autobw_threshold_table(self, v, load=False):
    """
    Setter method for autobw_threshold_table, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/autobw_threshold_table (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_autobw_threshold_table is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_autobw_threshold_table() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value in the generated container class; this validates it
        # and attaches path/extension metadata. Incompatible input raises below.
        t = YANGDynClass(v,base=autobw_threshold_table.autobw_threshold_table, is_container='container', presence=True, yang_name="autobw-threshold-table", rest_name="autobw-threshold-table", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Threshold Table', u'callpoint': u'MplsAutobwThresholdTable', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-autobw-threshold-table'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """autobw_threshold_table must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=autobw_threshold_table.autobw_threshold_table, is_container='container', presence=True, yang_name="autobw-threshold-table", rest_name="autobw-threshold-table", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Threshold Table', u'callpoint': u'MplsAutobwThresholdTable', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-autobw-threshold-table'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })
    self.__autobw_threshold_table = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_autobw_threshold_table(self):
    # Reset autobw_threshold_table to a fresh, empty presence container with its default metadata.
    self.__autobw_threshold_table = YANGDynClass(base=autobw_threshold_table.autobw_threshold_table, is_container='container', presence=True, yang_name="autobw-threshold-table", rest_name="autobw-threshold-table", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Autobw Threshold Table', u'callpoint': u'MplsAutobwThresholdTable', u'cli-add-mode': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-router-mpls-autobw-threshold-table'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_cspf_group(self):
    """
    Getter method for cspf_group, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/cspf_group (list)
    """
    return self.__cspf_group

def _set_cspf_group(self, v, load=False):
    """
    Setter method for cspf_group, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/cspf_group (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cspf_group is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cspf_group() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (key: cspf-group-name);
        # this validates it and attaches path/extension metadata.
        t = YANGDynClass(v,base=YANGListType("cspf_group_name",cspf_group.cspf_group, yang_name="cspf-group", rest_name="cspf-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cspf-group-name', extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}), is_container='list', yang_name="cspf-group", rest_name="cspf-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """cspf_group must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("cspf_group_name",cspf_group.cspf_group, yang_name="cspf-group", rest_name="cspf-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cspf-group-name', extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}), is_container='list', yang_name="cspf-group", rest_name="cspf-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__cspf_group = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_cspf_group(self):
    # Reset cspf_group to a fresh, empty keyed list with its default metadata.
    self.__cspf_group = YANGDynClass(base=YANGListType("cspf_group_name",cspf_group.cspf_group, yang_name="cspf-group", rest_name="cspf-group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cspf-group-name', extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}), is_container='list', yang_name="cspf-group", rest_name="cspf-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define a CSPF group', u'callpoint': u'MplsCspfGroup', u'cli-mode-name': u'config-router-cspf-group-$(cspf-group-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
def _get_path(self):
    """
    Getter method for path, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/path (list)
    """
    return self.__path

def _set_path(self, v, load=False):
    """
    Setter method for path, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/path (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_path is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_path() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (key: path-name);
        # this validates it and attaches path/extension metadata.
        t = YANGDynClass(v,base=YANGListType("path_name",path.path, yang_name="path", rest_name="path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='path-name', extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}), is_container='list', yang_name="path", rest_name="path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """path must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("path_name",path.path, yang_name="path", rest_name="path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='path-name', extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}), is_container='list', yang_name="path", rest_name="path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__path = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_path(self):
    # Reset path to a fresh, empty keyed list with its default metadata.
    self.__path = YANGDynClass(base=YANGListType("path_name",path.path, yang_name="path", rest_name="path", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='path-name', extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}), is_container='list', yang_name="path", rest_name="path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Path', u'cli-no-key-completion': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'MplsPath', u'cli-mode-name': u'config-router-mpls-path-$(path-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
def _get_bypass_lsp(self):
    """
    Getter method for bypass_lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bypass_lsp (list)
    """
    return self.__bypass_lsp

def _set_bypass_lsp(self, v, load=False):
    """
    Setter method for bypass_lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/bypass_lsp (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_bypass_lsp is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_bypass_lsp() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (key: bypass-lsp-name);
        # this validates it and attaches path/extension metadata.
        t = YANGDynClass(v,base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """bypass_lsp must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__bypass_lsp = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_bypass_lsp(self):
    # Reset bypass_lsp to a fresh, empty keyed list with its default metadata.
    self.__bypass_lsp = YANGDynClass(base=YANGListType("bypass_lsp_name",bypass_lsp.bypass_lsp, yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='bypass-lsp-name', extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}), is_container='list', yang_name="bypass-lsp", rest_name="bypass-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define Bypass LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsBypassLsp', u'cli-mode-name': u'config-router-mpls-bypass-lsp-$(bypass-lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
def _get_lsp(self):
    """
    Getter method for lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp (list)
    """
    return self.__lsp

def _set_lsp(self, v, load=False):
    """
    Setter method for lsp, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp() directly.
    """
    # Values built from YANG union types carry a _utype coercer; apply it so
    # the candidate value is in its native representation before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the value as a keyed YANG list (key: lsp-name);
        # this validates it and attaches path/extension metadata.
        t = YANGDynClass(v,base=YANGListType("lsp_name",lsp.lsp, yang_name="lsp", rest_name="lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='lsp-name', extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}), is_container='list', yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("lsp_name",lsp.lsp, yang_name="lsp", rest_name="lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='lsp-name', extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}), is_container='list', yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)""",
        })
    self.__lsp = t
    # Notify the containing object of the change, when it supports notification.
    if hasattr(self, '_set'):
        self._set()

def _unset_lsp(self):
    # Reset lsp to a fresh, empty keyed list with its default metadata.
    self.__lsp = YANGDynClass(base=YANGListType("lsp_name",lsp.lsp, yang_name="lsp", rest_name="lsp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='lsp-name', extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}), is_container='list', yang_name="lsp", rest_name="lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Define LSP', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'callpoint': u'MplsLsp', u'cli-mode-name': u'config-router-mpls-lsp-$(lsp-name)'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='list', is_config=True)
# Publish the generated _get_*/_set_* accessor pairs as plain attributes, so
# callers can read/assign e.g. `obj.bfd` directly. `__builtin__.property` is
# used because some names below (e.g. `path`) shadow imported module names.
lsp_xc_traps = __builtin__.property(_get_lsp_xc_traps, _set_lsp_xc_traps)
policy = __builtin__.property(_get_policy, _set_policy)
rsvp = __builtin__.property(_get_rsvp, _set_rsvp)
ldp = __builtin__.property(_get_ldp, _set_ldp)
bfd = __builtin__.property(_get_bfd, _set_bfd)
dynamic_bypass = __builtin__.property(_get_dynamic_bypass, _set_dynamic_bypass)
mpls_interface = __builtin__.property(_get_mpls_interface, _set_mpls_interface)
autobw_template = __builtin__.property(_get_autobw_template, _set_autobw_template)
autobw_threshold_table = __builtin__.property(_get_autobw_threshold_table, _set_autobw_threshold_table)
cspf_group = __builtin__.property(_get_cspf_group, _set_cspf_group)
path = __builtin__.property(_get_path, _set_path)
bypass_lsp = __builtin__.property(_get_bypass_lsp, _set_bypass_lsp)
lsp = __builtin__.property(_get_lsp, _set_lsp)
# Registry pyangbind uses to enumerate this container's child elements.
_pyangbind_elements = {'lsp_xc_traps': lsp_xc_traps, 'policy': policy, 'rsvp': rsvp, 'ldp': ldp, 'bfd': bfd, 'dynamic_bypass': dynamic_bypass, 'mpls_interface': mpls_interface, 'autobw_template': autobw_template, 'autobw_threshold_table': autobw_threshold_table, 'cspf_group': cspf_group, 'path': path, 'bypass_lsp': bypass_lsp, 'lsp': lsp, }
| 102.627698
| 1,083
| 0.73679
| 8,433
| 57,061
| 4.789162
| 0.025377
| 0.02377
| 0.045757
| 0.033872
| 0.914725
| 0.902667
| 0.89769
| 0.892886
| 0.890262
| 0.883923
| 0
| 0.000117
| 0.104818
| 57,061
| 555
| 1,084
| 102.812613
| 0.790544
| 0.110198
| 0
| 0.478006
| 0
| 0.038123
| 0.47713
| 0.205467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123167
| false
| 0.076246
| 0.061584
| 0
| 0.293255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
7d74e2c738ac2f0a60efc37be5f5dd776506ac0f
| 772
|
py
|
Python
|
scripts/patches/transfer.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/patches/transfer.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
scripts/patches/transfer.py
|
compose-x/troposphere
|
9a94a8fafd8b4da1cd1f4239be0e7aa0681fd8d4
|
[
"BSD-2-Clause"
] | null | null | null |
# RFC 6902 JSON-patch operations applied to the CloudFormation resource
# specification: AWS::Transfer::Server.Protocols and AWS::Transfer::User
# .SshPublicKeys are lists of plain strings, so each spec entry's ItemType
# is renamed to PrimitiveItemType and its value forced to "String".
patches = [
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
        "value": "String",
    },
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
        "value": "String",
    },
]
| 33.565217
| 96
| 0.604922
| 61
| 772
| 7.655738
| 0.295082
| 0.205567
| 0.308351
| 0.239829
| 0.937902
| 0.90364
| 0.646681
| 0.608137
| 0
| 0
| 0
| 0
| 0.212435
| 772
| 22
| 97
| 35.090909
| 0.768092
| 0
| 0
| 0.454545
| 0
| 0
| 0.665803
| 0.567358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7dad77e428516e43dd8aeb0eb56377ef305b314e
| 114
|
py
|
Python
|
main.py
|
thewh1teagle/whatsapp-web-multi-device-reveng
|
4dde22fcd45022b12ded94d704c2da4929f8dd98
|
[
"MIT"
] | 83
|
2021-07-19T13:50:15.000Z
|
2022-02-19T09:24:23.000Z
|
main.py
|
thewh1teagle/whatsapp-web-multi-device-reveng
|
4dde22fcd45022b12ded94d704c2da4929f8dd98
|
[
"MIT"
] | 2
|
2021-07-19T23:29:55.000Z
|
2021-09-28T12:22:50.000Z
|
main.py
|
thewh1teagle/whatsapp-web-multi-device-reveng
|
4dde22fcd45022b12ded94d704c2da4929f8dd98
|
[
"MIT"
] | 18
|
2021-07-19T13:49:33.000Z
|
2022-02-05T21:47:12.000Z
|
#!/usr/bin/env python3
# Entry point: delegates to the QR-generation proof-of-concept module.
import pocs.poc01_generate_qr

if __name__ == '__main__':
    pocs.poc01_generate_qr.main()
| 16.285714
| 31
| 0.754386
| 17
| 114
| 4.352941
| 0.705882
| 0.243243
| 0.459459
| 0.513514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049505
| 0.114035
| 114
| 6
| 32
| 19
| 0.683168
| 0.184211
| 0
| 0
| 1
| 0
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
7de02cd942355c648774aef68df70e0589ab8bf5
| 17,555
|
py
|
Python
|
tests/api/v1/test_teams.py
|
cclauss/CTFd
|
7c60c697ee38a500b4584fbac14e2e424e48e4d4
|
[
"Apache-2.0"
] | null | null | null |
tests/api/v1/test_teams.py
|
cclauss/CTFd
|
7c60c697ee38a500b4584fbac14e2e424e48e4d4
|
[
"Apache-2.0"
] | null | null | null |
tests/api/v1/test_teams.py
|
cclauss/CTFd
|
7c60c697ee38a500b4584fbac14e2e424e48e4d4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from CTFd.utils import set_config
from CTFd.utils.crypto import verify_password
from tests.helpers import *
def test_api_teams_get_public():
    """Can a user get /api/v1/teams if teams are public"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            # Anonymous access: public lists, private redirects, admins hides.
            for visibility, expected in (('public', 200), ('private', 302), ('admins', 404)):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams').status_code == expected
    destroy_ctfd(app)
def test_api_teams_get_private():
    """Can a user get /api/v1/teams if teams are private"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        register_user(app)
        with login_as_user(app) as client:
            set_config('account_visibility', 'public')
            response = client.get('/api/v1/teams')
            print(response.__dict__)
            assert response.status_code == 200
            # Logged-in users still see the list when private; admins-only hides it.
            for visibility, expected in (('private', 200), ('admins', 404)):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams').status_code == expected
    destroy_ctfd(app)
def test_api_teams_get_admin():
    """Can a user get /api/v1/teams if teams are viewed by admins only"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with login_as_user(app, 'admin') as client:
            # Admins can list teams under every visibility setting.
            for visibility in ('public', 'private', 'admins'):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams').status_code == 200
    destroy_ctfd(app)
def test_api_teams_post_non_admin():
    """Can a user post /api/v1/teams if not admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            # Anonymous team creation must be forbidden.
            response = client.post('/api/v1/teams', json="")
            assert response.status_code == 403
    destroy_ctfd(app)
def test_api_teams_post_admin():
    """Can a user post /api/v1/teams if admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with login_as_user(app, 'admin') as client:
            # Create team
            payload = {
                "website": "http://www.team.com",
                "name": "team",
                "country": "TW",
                "email": "team@team.com",
                "affiliation": "team",
                "password": "password"
            }
            response = client.post('/api/v1/teams', json=payload)
            assert response.status_code == 200

            # Make sure password was hashed properly
            team = Teams.query.filter_by(email='team@team.com').first()
            assert team
            assert verify_password('password', team.password)

            # Make sure team can actually be joined
            register_user(app)
            client = login_as_user(app)
            with client.session_transaction() as sess:
                join_form = {
                    "name": "team",
                    "password": "password",
                    "nonce": sess.get('nonce')
                }
            response = client.post('/teams/join', data=join_form)
            user = Users.query.filter_by(id=2).first()
            assert user.team_id == 1
    destroy_ctfd(app)
def test_api_teams_post_admin_duplicate():
    """Test that admins can only create teams with unique information"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        gen_team(app.db, name='team1')
        with login_as_user(app, 'admin') as client:
            # Each tuple: (field expected to error, payload colliding on it).
            attempts = [
                # Duplicate name
                ('name', {
                    "website": "https://ctfd.io",
                    "name": "team1",
                    "country": "TW",
                    "email": "team1@ctfd.io",
                    "affiliation": "team",
                    "password": "password"
                }),
                # Duplicate email
                ('email', {
                    "website": "https://ctfd.io",
                    "name": "new_team",
                    "country": "TW",
                    "email": "team@ctfd.io",
                    "affiliation": "team",
                    "password": "password"
                }),
            ]
            for field, payload in attempts:
                response = client.post('/api/v1/teams', json=payload)
                body = response.get_json()
                assert response.status_code == 400
                assert body['errors'][field]
                assert body['success'] is False
                # The only team remains the pre-seeded one.
                assert Teams.query.count() == 1
    destroy_ctfd(app)
def test_api_team_get_public():
    """Can a user get /api/v1/team/<team_id> if teams are public"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            set_config('account_visibility', 'public')
            gen_team(app.db)
            assert client.get('/api/v1/teams/1').status_code == 200
            # Anonymous access under tighter visibility settings.
            for visibility, expected in (('private', 302), ('admins', 404)):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams/1').status_code == expected
    destroy_ctfd(app)
def test_api_team_get_private():
    """Can a user get /api/v1/teams/<team_id> if teams are private"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        register_user(app)
        with login_as_user(app) as client:
            set_config('account_visibility', 'public')
            gen_team(app.db)
            response = client.get('/api/v1/teams/1')
            print(response.__dict__)
            assert response.status_code == 200
            # Logged-in users may view the team when private, not when admins-only.
            for visibility, expected in (('private', 200), ('admins', 404)):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams/1').status_code == expected
    destroy_ctfd(app)
def test_api_team_get_admin():
    """Can a user get /api/v1/teams/<team_id> if teams are viewed by admins only"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with login_as_user(app, 'admin') as client:
            gen_team(app.db)
            # Admins may fetch the team under every visibility setting.
            for visibility in ('public', 'private', 'admins'):
                set_config('account_visibility', visibility)
                assert client.get('/api/v1/teams/1').status_code == 200
    destroy_ctfd(app)
def test_api_team_patch_non_admin():
    """Can a user patch /api/v1/teams/<team_id> if not admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        gen_team(app.db)
        with app.test_client() as client:
            # Anonymous edits must be forbidden.
            response = client.patch('/api/v1/teams/1', json="")
            assert response.status_code == 403
    destroy_ctfd(app)
def test_api_team_patch_admin():
    """Can a user patch /api/v1/teams/<team_id> if admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        gen_team(app.db)
        with login_as_user(app, 'admin') as client:
            changes = {
                "name": "team_name",
                "email": "team@ctfd.io",
                "password": "password",
                "affiliation": "changed"
            }
            response = client.patch('/api/v1/teams/1', json=changes)
            team = Teams.query.filter_by(id=1).first()
            assert response.status_code == 200
            assert response.get_json()['data']['affiliation'] == 'changed'
            # The password must be stored hashed, not verbatim.
            assert verify_password('password', team.password)
    destroy_ctfd(app)
def test_api_team_delete_non_admin():
    """Can a user delete /api/v1/teams/<team_id> if not admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        gen_team(app.db)
        with app.test_client() as client:
            # Anonymous deletion must be forbidden.
            response = client.delete('/api/v1/teams/1', json="")
            assert response.status_code == 403
    destroy_ctfd(app)
def test_api_team_delete_admin():
    """Can a user delete /api/v1/teams/<team_id> if admin"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        gen_team(app.db)
        with login_as_user(app, 'admin') as client:
            response = client.delete('/api/v1/teams/1', json="")
            assert response.status_code == 200
            # A successful delete carries no data payload.
            assert response.get_json().get('data') is None
    destroy_ctfd(app)
def test_api_team_get_me_not_logged_in():
    """Can a user get /api/v1/teams/me if not logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            # Anonymous users are redirected away from /me.
            assert client.get('/api/v1/teams/me').status_code == 302
    destroy_ctfd(app)
def test_api_team_get_me_logged_in():
    """Can a user get /api/v1/teams/me if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            assert client.get('/api/v1/teams/me').status_code == 200
    destroy_ctfd(app)
def test_api_team_patch_me_not_logged_in():
    """Can a user patch /api/v1/teams/me if not logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            # Anonymous edits to /me must be forbidden.
            response = client.patch('/api/v1/teams/me', json="")
            assert response.status_code == 403
    destroy_ctfd(app)
def test_api_team_patch_me_logged_in():
    """Can a user patch /api/v1/teams/me if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.patch('/api/v1/teams/me', json={"name": "team_name", "affiliation": "changed"})
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_me_solves_not_logged_in():
    """Can a user get /api/v1/teams/me/solves if not logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            assert client.get('/api/v1/teams/me/solves').status_code == 403
    destroy_ctfd(app)
def test_api_team_get_me_solves_logged_in():
    """Can a user get /api/v1/teams/me/solves if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/me/solves')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_solves():
    """Can a user get /api/v1/teams/<team_id>/solves if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/1/solves')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_me_fails_not_logged_in():
    """Can a user get /api/v1/teams/me/fails if not logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            assert client.get('/api/v1/teams/me/fails').status_code == 403
    destroy_ctfd(app)
def test_api_team_get_me_fails_logged_in():
    """Can a user get /api/v1/teams/me/fails if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/me/fails')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_fails():
    """Can a user get /api/v1/teams/<team_id>/fails if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/1/fails')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_me_awards_not_logged_in():
    """Can a user get /api/v1/teams/me/awards if not logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        with app.test_client() as client:
            assert client.get('/api/v1/teams/me/awards').status_code == 403
    destroy_ctfd(app)
def test_api_team_get_me_awards_logged_in():
    """Can a user get /api/v1/teams/me/awards if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/me/awards')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_team_get_awards():
    """Can a user get /api/v1/teams/<team_id>/awards if logged in"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        # Seed one user belonging to one team.
        member = gen_user(app.db)
        squad = gen_team(app.db)
        squad.members.append(member)
        member.team_id = squad.id
        app.db.session.commit()
        with login_as_user(app, name="user_name") as client:
            response = client.get('/api/v1/teams/1/awards')
            print(response.get_json())
            assert response.status_code == 200
    destroy_ctfd(app)
def test_api_accessing_hidden_banned_users():
    """Hidden/Banned users should not be visible to normal users, only to admins"""
    app = create_ctfd(user_mode="teams")
    with app.app_context():
        register_user(app)
        register_user(app, name="user2", email="user2@ctfd.io")
        register_user(app, name="visible_user", email="visible_user@ctfd.io")

        # Team 1 is hidden and owns user id 2.
        user = Users.query.filter_by(id=2).first()
        team = gen_team(app.db, name='hidden_team', email="hidden_team@ctfd.io", hidden=True)
        team.members.append(user)
        user.team_id = team.id
        app.db.session.commit()

        # Team 2 is banned and owns user id 3.
        user = Users.query.filter_by(id=3).first()
        team = gen_team(app.db, name='banned_team', email="banned_team@ctfd.io", banned=True)
        team.members.append(user)
        user.team_id = team.id
        app.db.session.commit()

        suffixes = ('', '/solves', '/fails', '/awards')

        # A normal user sees 404 for every endpoint of both teams.
        with login_as_user(app, name="visible_user") as client:
            for team_id in (1, 2):
                for suffix in suffixes:
                    assert client.get('/api/v1/teams/{}{}'.format(team_id, suffix)).status_code == 404

        # An admin sees 200 for every endpoint of both teams.
        with login_as_user(app, name="admin") as client:
            for team_id in (1, 2):
                for suffix in suffixes:
                    assert client.get('/api/v1/teams/{}{}'.format(team_id, suffix)).status_code == 200
    destroy_ctfd(app)
| 36.345756
| 102
| 0.580518
| 2,416
| 17,555
| 4.015315
| 0.05505
| 0.041233
| 0.081435
| 0.081744
| 0.899495
| 0.879806
| 0.865065
| 0.858571
| 0.83445
| 0.800742
| 0
| 0.02306
| 0.283623
| 17,555
| 482
| 103
| 36.421162
| 0.74833
| 0.093136
| 0
| 0.765013
| 0
| 0
| 0.143111
| 0.029218
| 0
| 0
| 0
| 0
| 0.174935
| 1
| 0.070496
| false
| 0.020888
| 0.007833
| 0
| 0.078329
| 0.020888
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81581e76eee90743d4cbe790aea7302578763d3f
| 7,603
|
py
|
Python
|
adet/modeling/nlos_converter/nlos_converter.py
|
pjh4993/nlos_detection
|
03f1ecd73eb6fce3c0ddfdf14eedaa049dd87f39
|
[
"BSD-2-Clause"
] | null | null | null |
adet/modeling/nlos_converter/nlos_converter.py
|
pjh4993/nlos_detection
|
03f1ecd73eb6fce3c0ddfdf14eedaa049dd87f39
|
[
"BSD-2-Clause"
] | null | null | null |
adet/modeling/nlos_converter/nlos_converter.py
|
pjh4993/nlos_detection
|
03f1ecd73eb6fce3c0ddfdf14eedaa049dd87f39
|
[
"BSD-2-Clause"
] | null | null | null |
from collections import defaultdict
from typing import List, Dict
import torch
from torch import nn
from torch.nn import functional as F
import math
from detectron2.layers import ShapeSpec, NaiveSyncBatchNorm
from adet.modeling.nlos_converter.build import NLOS_CONVERTER_REGISTRY
from adet.layers import DFConv2d, NaiveGroupNorm
from adet.utils.comm import compute_locations
@NLOS_CONVERTER_REGISTRY.register()
class conv_fc_nlos_converter(nn.Module):
    # Shared conv tower over every input level, then one fully-connected layer
    # per level that maps the flattened activations to a fixed spatial grid.

    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
        """
        Build the conv tower and the per-level FC layers from config.

        Args:
            cfg: config node; reads ``MODEL.NLOS_CONVERTER.*`` and
                ``NLOS.LASER_GRID``.
            input_shape: mapping from feature name to its :class:`ShapeSpec`;
                only the names listed in ``MODEL.NLOS_CONVERTER.IN_FEATURES``
                are used.
        """
        super().__init__()
        # "conv": number of conv layers; "fc": (per-level input sizes,
        # per-level output (w, h) pairs) — exact element semantics come from
        # the config, not visible here.
        head_configs = {"conv": cfg.MODEL.NLOS_CONVERTER.NUM_CONVS,
                        "fc": (cfg.MODEL.NLOS_CONVERTER.IN_FC_CHANNELS,
                               cfg.MODEL.NLOS_CONVERTER.OUT_FC_CHANNELS)}
        norm = None if cfg.MODEL.NLOS_CONVERTER.NORM == "none" else cfg.MODEL.NLOS_CONVERTER.NORM
        self.int_conv_channel = cfg.MODEL.NLOS_CONVERTER.INT_CONV_CHANNEL
        self.in_features = cfg.MODEL.NLOS_CONVERTER.IN_FEATURES
        input_shape = [input_shape[f] for f in self.in_features]
        self.num_levels = len(input_shape)
        self.laser_grid = cfg.NLOS.LASER_GRID
        in_channels = [s.channels for s in input_shape]
        assert len(set(in_channels)) == 1, "Each level must have the same channel!"
        in_channels = in_channels[0]
        # GroupNorm below always uses 32 groups, so the channel count must be
        # divisible by 32 unless normalization is disabled.
        assert self.int_conv_channel % 32 == 0 or norm == None
        conv_tower = []
        for _ in range(head_configs["conv"]):
            conv_layer = nn.Conv2d(in_channels, self.int_conv_channel, kernel_size=3, stride=1, padding=1, bias=True)
            in_channels = self.int_conv_channel
            conv_tower.append(conv_layer)
            if norm == "GN":
                conv_tower.append(nn.GroupNorm(32, in_channels))
            elif norm == "NaiveGN":
                conv_tower.append(NaiveGroupNorm(32, in_channels))
        # Registered as "conv_tower"; referenced below and in forward().
        self.add_module('{}_tower'.format("conv"), nn.Sequential(*conv_tower))
        fc_per_level = []
        for in_fc_channel, out_fc_channel, k in zip(head_configs["fc"][0], head_configs["fc"][1], self.in_features):
            # Input size scales with laser_grid**2; the extra factor of 2
            # presumably pairs two maps per laser position — TODO confirm
            # against the dataset loader.
            fc_layer = nn.Linear(in_fc_channel * self.int_conv_channel * 2 * (self.laser_grid **2), out_fc_channel[0] * out_fc_channel[1] * self.int_conv_channel)
            torch.nn.init.xavier_uniform_(fc_layer.weight)
            torch.nn.init.constant_(fc_layer.bias, 0)
            fc_per_level.append(fc_layer)
            self.add_module('{}_fc_layer'.format(k),
                            fc_layer)
        # Re-initialize all conv weights with a small normal distribution.
        for modules in [
            self.conv_tower
        ]:
            for l in modules.modules():
                if isinstance(l, nn.Conv2d):
                    torch.nn.init.normal_(l.weight, std=0.01)
                    torch.nn.init.constant_(l.bias, 0)
        self.fc_per_level = fc_per_level
        self.head_configs = head_configs

    def forward(self, features: List[Dict[str, torch.Tensor]]):
        """
        Convert a batch of per-level feature dicts into one tensor per level.

        Args:
            features: one dict per sample, keyed by feature name (each element
                is indexed with ``self.in_features`` names below).

        Returns:
            dict mapping each feature name to a tensor of shape
            ``(len(features), int_conv_channel, out_h, out_w)``.
        """
        converted_feature = dict()
        for k in self.in_features:
            converted_feature[k] = []
        for x in features:
            x = [x[f] for f in self.in_features]
            for k, v in zip(self.in_features, x):#, self.fc_per_level, self.head_configs["fc"][1]):
                # Flatten each sample's conv output to a single row.
                t = F.relu(self.conv_tower(v)).reshape(1,-1)
                converted_feature[k].append(t)
        for k, fc, output_shape in zip(self.in_features, self.fc_per_level, self.head_configs["fc"][1]):
            # Stack the per-sample rows, run the level's FC, and reshape to
            # (N, C, h, w); output_shape is (w, h) given the reversed indexing.
            t = torch.cat(converted_feature[k], dim=0)
            N = t.shape[0]
            converted_feature[k] = fc(t).reshape(N, self.int_conv_channel, output_shape[1], output_shape[0])
        return converted_feature

    def output_shape(self):
        """Return a ShapeSpec per output feature (stride is reported as 0)."""
        return {
            name: ShapeSpec(
                channels=self.int_conv_channel, stride=0
            )
            for name in self.in_features
        }
@NLOS_CONVERTER_REGISTRY.register()
class channel_preserving_nlos_converter(nn.Module):
    # Variant of conv_fc_nlos_converter whose FC layers act per channel:
    # conv activations keep their channel dimension and only the spatial /
    # laser-grid dimension is projected by the linear layer.

    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
        """
        Build the conv tower and the per-level FC layers from config.

        Args:
            cfg: config node; reads ``MODEL.NLOS_CONVERTER.*`` and
                ``NLOS.LASER_GRID``.
            input_shape: mapping from feature name to its :class:`ShapeSpec`;
                only the names listed in ``MODEL.NLOS_CONVERTER.IN_FEATURES``
                are used.
        """
        super().__init__()
        head_configs = {"conv": cfg.MODEL.NLOS_CONVERTER.NUM_CONVS,
                        "fc": (cfg.MODEL.NLOS_CONVERTER.IN_FC_CHANNELS,
                               cfg.MODEL.NLOS_CONVERTER.OUT_FC_CHANNELS)}
        norm = None if cfg.MODEL.NLOS_CONVERTER.NORM == "none" else cfg.MODEL.NLOS_CONVERTER.NORM
        self.int_conv_channel = cfg.MODEL.NLOS_CONVERTER.INT_CONV_CHANNEL
        self.in_features = cfg.MODEL.NLOS_CONVERTER.IN_FEATURES
        input_shape = [input_shape[f] for f in self.in_features]
        self.num_levels = len(input_shape)
        self.laser_grid = cfg.NLOS.LASER_GRID
        in_channels = [s.channels for s in input_shape]
        assert len(set(in_channels)) == 1, "Each level must have the same channel!"
        in_channels = in_channels[0]
        # GroupNorm below always uses 32 groups, so the channel count must be
        # divisible by 32 unless normalization is disabled.
        assert self.int_conv_channel % 32 == 0 or norm == None
        conv_tower = []
        for _ in range(head_configs["conv"]):
            conv_layer = nn.Conv2d(in_channels, self.int_conv_channel, kernel_size=3, stride=1, padding=1, bias=True)
            in_channels = self.int_conv_channel
            conv_tower.append(conv_layer)
            if norm == "GN":
                conv_tower.append(nn.GroupNorm(32, in_channels))
            elif norm == "NaiveGN":
                conv_tower.append(NaiveGroupNorm(32, in_channels))
        # Registered as "conv_tower"; referenced below and in forward().
        self.add_module('{}_tower'.format("conv"),
                        nn.Sequential(*conv_tower))
        fc_per_level = []
        for laser_wh, out_fc_channel, k in zip(head_configs["fc"][0], head_configs["fc"][1], self.in_features):
            # Unlike conv_fc_nlos_converter, the linear layer does not scale
            # with int_conv_channel: it is applied per channel in forward().
            fc_layer = nn.Linear(laser_wh * (self.laser_grid **2), out_fc_channel[0] * out_fc_channel[1])
            torch.nn.init.xavier_uniform_(fc_layer.weight)
            torch.nn.init.constant_(fc_layer.bias, 0)
            fc_per_level.append(fc_layer)
            self.add_module('{}_fc_layer'.format(k),
                            fc_layer)
        # Re-initialize all conv weights with a small normal distribution.
        for modules in [
            self.conv_tower
        ]:
            for l in modules.modules():
                if isinstance(l, nn.Conv2d):
                    torch.nn.init.normal_(l.weight, std=0.01)
                    torch.nn.init.constant_(l.bias, 0)
        self.fc_per_level = fc_per_level
        self.head_configs = head_configs

    def forward(self, features: List[Dict[str, torch.Tensor]]):
        """
        Convert a batch of per-level feature dicts into one tensor per level,
        preserving the conv channel dimension through the FC projection.

        Args:
            features: one dict per sample, keyed by feature name (each element
                is indexed with ``self.in_features`` names below).

        Returns:
            dict mapping each feature name to a tensor of shape
            ``(len(features), int_conv_channel, out_h, out_w)``.
        """
        grid_size = self.laser_grid ** 2
        converted_feature = dict()
        for k in self.in_features:
            converted_feature[k] = []
        for x in features:
            x = [x[f] for f in self.in_features]
            for k, v in zip(self.in_features, x):#, self.fc_per_level, self.head_configs["fc"][1]):
                t = F.relu(self.conv_tower(v))
                # Sum even- and odd-indexed entries along dim 0 (pairs per
                # laser position, presumably — assumes dim 0 has 2*grid_size
                # entries; TODO confirm against the data pipeline).
                t = t[torch.arange(start=0, end=grid_size * 2, step=2)] + t[torch.arange(start=1, end=grid_size *2, step=2)]
                # Move channels first and flatten the remaining dims.
                t = t.permute(1,0,2,3).reshape(1, self.int_conv_channel,-1)
                converted_feature[k].append(t)
        for k, fc, output_shape in zip(self.in_features, self.fc_per_level, self.head_configs["fc"][1]):
            # Stack per-sample tensors, project, and reshape to (N, C, h, w);
            # output_shape is (w, h) given the reversed indexing.
            t = torch.cat(converted_feature[k], dim=0)
            N = t.shape[0]
            converted_feature[k] = fc(t).reshape(N, self.int_conv_channel, output_shape[1], output_shape[0])
        return converted_feature

    def output_shape(self):
        """Return a ShapeSpec per output feature (stride is reported as 0)."""
        return {
            name: ShapeSpec(
                channels=self.int_conv_channel, stride=0
            )
            for name in self.in_features
        }
| 41.774725
| 162
| 0.606208
| 1,030
| 7,603
| 4.208738
| 0.127184
| 0.059977
| 0.054902
| 0.062284
| 0.888812
| 0.864591
| 0.864591
| 0.856286
| 0.856286
| 0.856286
| 0
| 0.014647
| 0.281599
| 7,603
| 182
| 163
| 41.774725
| 0.779019
| 0.01289
| 0
| 0.805556
| 0
| 0
| 0.024181
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 1
| 0.041667
| false
| 0
| 0.069444
| 0.013889
| 0.152778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81590cbd9be2a46d767c60cb9b80e67367f3e027
| 118,146
|
py
|
Python
|
json_test.py
|
krummas/cassandra-dtest
|
7e3bcfd52fbc926b4c43e258a7e0efa19e1ca13d
|
[
"Apache-2.0"
] | 52
|
2015-02-13T15:49:03.000Z
|
2020-11-15T10:59:20.000Z
|
json_test.py
|
krummas/cassandra-dtest
|
7e3bcfd52fbc926b4c43e258a7e0efa19e1ca13d
|
[
"Apache-2.0"
] | 1,232
|
2015-01-05T19:31:26.000Z
|
2020-06-07T02:59:43.000Z
|
json_test.py
|
krummas/cassandra-dtest
|
7e3bcfd52fbc926b4c43e258a7e0efa19e1ca13d
|
[
"Apache-2.0"
] | 91
|
2015-02-23T23:58:44.000Z
|
2020-05-24T11:05:03.000Z
|
import doctest
import inspect
import os
import re
import subprocess
import sys
from distutils.version import LooseVersion
from ccmlib import common
from ccmlib.common import is_win
from dtest import Tester
from tools.decorators import since
def build_doc_context(tester, test_name, prepare=True, connection=None, nodes=None):
    """
    Takes an instance of dtest.Tester (or a subclass), completes some basic setup, and returns a
    dict of "globs" to be used as a doctest execution context.
    This provides a doctest environment to test cql via python driver, or cqlsh directly.
    Cqlsh commands made via the cqlsh_* commands are sent (almost) exactly as provided, but a keyspace will be injected before commands for convenience.
    If you prefer to customize setup, pass prepare=False and provide your own cql connection object and a list of ccm nodes.
    When nodes are provided, the first node will be used for connections and cqlsh.
    """
    default_ks_name = test_name
    if prepare:
        if connection or nodes:
            raise RuntimeError("Cannot auto prepare doctest context when connection or nodes are provided.")
        # Spin up a single-node cluster and create a keyspace named after the test.
        tester.cluster.populate(1).start()
        nodes = tester.cluster.nodelist()
        connection = tester.patient_cql_connection(nodes[0])
        connection.execute("CREATE KEYSPACE {} WITH REPLICATION = {{'class': 'SimpleStrategy', 'replication_factor': 1}};".format(default_ks_name))
        connection.execute("USE {}".format(default_ks_name))
    else:
        if not (connection and nodes):
            raise RuntimeError("Cannot build doctest context without connection and nodes.")

    def ks(new_ks):
        """
        Change active keyspace
        Makes cql request via python driver, and also hangs onto the ks_name
        in case we are using cqlsh, since cqlsh is closed and will forget between requests.
        """
        # if we are using regular cql this should stick
        connection.execute("USE {};".format(new_ks))
        # sneaky attribute on this func itself in case we are using cqlsh
        # and need to inject a "USE" statement right before running a cqlsh query.
        ks.current_ks = new_ks

    def enabled_ks():
        # Current keyspace: whatever ks() last set, else the test's default.
        return getattr(ks, 'current_ks', default_ks_name)

    def _cqlsh(cmds):
        """
        Modified from cqlsh_tests.py
        Attempts to make direct cqlsh communication.

        Returns the (stdout, stderr) pair from the cqlsh subprocess.
        """
        cdir = nodes[0].get_install_dir()
        cli = os.path.join(cdir, 'bin', common.platform_binary('cqlsh'))
        env = common.make_cassandra_env(cdir, nodes[0].get_path())
        env['LANG'] = 'en_US.UTF-8'
        # CASSANDRA-10428 changes the default time format to include microseconds (%f) but only
        # for version 3.2 onwards, so we fix the default timestamp for the time-being, to
        # avoid having multiple versions of these tests since it would be a bit messy to change the docstrings
        env['CQLSH_DEFAULT_TIMESTAMP_FORMAT'] = '%Y-%m-%d %H:%M:%S%z'
        # The native-protocol ("binary") port only exists from 2.1 on; older
        # clusters are reached over thrift.
        if tester.cluster.version() >= LooseVersion('2.1'):
            host = nodes[0].network_interfaces['binary'][0]
            port = nodes[0].network_interfaces['binary'][1]
        else:
            host = nodes[0].network_interfaces['thrift'][0]
            port = nodes[0].network_interfaces['thrift'][1]
        args = [host, str(port)]
        sys.stdout.flush()
        p = subprocess.Popen([cli] + args, env=env, stdin=subprocess.PIPE, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        # Select the active keyspace before replaying the caller's statements.
        p.stdin.write("USE {};".format(enabled_ks()))
        for cmd in cmds.split(';'):
            p.stdin.write(cmd + ';\n')
        p.stdin.write("quit;\n")  # may not be necesary, things could simplify a bit if removed
        return p.communicate()

    def cqlsh(cmds, supress_err=False):
        """
        Run cqlsh commands and return regular output. If stderr occurs this will raise RuntimeError unless supress_err=True.
        """
        output, err = _cqlsh(cmds)
        if err and not supress_err:
            raise RuntimeError("Unexpected cqlsh error: {}".format(err))
        # if output is empty string we want to just return None
        if output:
            return output

    def cqlsh_print(cmds, supress_err=False):
        """
        Run cqlsh commands and print output.
        """
        output = cqlsh(cmds, supress_err=supress_err)
        # python coerces LF to OS-specific line-endings on print or write calls
        # unless the stream is opened in binary mode. It's cleaner just to
        # patch that up here so subsequent doctest comparisons to <BLANKLINE>
        # pass, as they'll fail on Windows w/whitespace + ^M (CRLF)
        if is_win() and output:
            output = re.sub(os.linesep, '\n', output)
        if output:
            print(output)

    def cqlsh_err(cmds):
        """
        Run cqlsh commands expecting error.

        Raises RuntimeError if no stderr output was produced.
        """
        output, err = _cqlsh(cmds)
        # python coerces LF to OS-specific line-endings on print or write calls
        # unless the stream is opened in binary mode. It's cleaner just to
        # patch that up here so subsequent doctest comparisons to <BLANKLINE>
        # pass, as they'll fail on Windows w/whitespace + ^M (CRLF)
        if is_win() and output:
            err = re.sub(os.linesep, '\n', err)
        if not err:
            raise RuntimeError("Expected cqlsh error but none occurred!")
        return err

    def cqlsh_err_print(cmds):
        """
        Run cqlsh commands expecting error output, and print error output.
        """
        print(cqlsh_err(cmds))

    def cql(query):
        """
        Performs cql query via python driver connection.
        """
        return connection.execute(query)

    # These names become the doctest globals available inside docstrings.
    return {
        'ks': ks,
        'enabled_ks': enabled_ks,
        'cql': cql,
        'cqlsh': cqlsh,
        'cqlsh_print': cqlsh_print,
        'cqlsh_err': cqlsh_err,
        'cqlsh_err_print': cqlsh_err_print,
        'tester': tester
    }
def run_func_docstring(tester, test_func, globs=None, verbose=False, compileflags=None, optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE):
    """
    Similar to doctest.run_docstring_examples, but takes a single function/bound method,
    extracts it's singular docstring (no looking for subobjects with tests),
    runs it, and most importantly raises an exception if the test doesn't pass.
    tester should be an instance of dtest.Tester
    test_func should be a function/bound method the docstring to be tested
    """
    name = test_func.__name__
    if globs is None:
        globs = build_doc_context(tester, name)

    # Accumulate everything the doctest runner reports so a failure message
    # can include the nicely formatted output.
    def _capture(text):
        _capture.content = getattr(_capture, 'content', '') + text

    parsed = doctest.DocTestParser().get_doctest(inspect.getdoc(test_func), globs, name, None, None)
    runner = doctest.DocTestRunner(verbose=verbose, optionflags=optionflags)
    runner.run(parsed, out=_capture, compileflags=compileflags)

    failed, attempted = runner.summarize()
    if failed > 0:
        raise RuntimeError("Doctest failed! Captured output:\n{}".format(_capture.content))
    if failed + attempted == 0:
        raise RuntimeError("No tests were run!")
@since('2.2')
class ToJsonSelectTests(Tester):
"""
Tests using toJson with a SELECT statement
"""
def basic_data_types_test(self):
"""
Create our schema:
>>> cqlsh_print('''
... CREATE TABLE primitive_type_test (
... key1 text PRIMARY KEY,
... col1 ascii,
... col2 blob,
... col3 inet,
... col4 text,
... col5 timestamp,
... col6 timeuuid,
... col7 uuid,
... col8 varchar,
... col9 bigint,
... col10 decimal,
... col11 double,
... col12 float,
... col13 int,
... col14 varint,
... col15 boolean)
... ''')
Insert a row with only the key defined:
>>> cqlsh('''
... INSERT into primitive_type_test (key1) values ('foo')
... ''')
Get the non-key values as json:
>>> cqlsh_print('''
... SELECT toJson(col1), toJson(col2), toJson(col3), toJson(col4), toJson(col5),
... toJson(col6), toJson(col7), toJson(col8), toJson(col9), toJson(col10),
... toJson(col11),toJson(col12),toJson(col13),toJson(col14),toJson(col15)
... FROM primitive_type_test WHERE key1 = 'foo'
... ''')
<BLANKLINE>
system.tojson(col1) | system.tojson(col2) | system.tojson(col3) | system.tojson(col4) | system.tojson(col5) | system.tojson(col6) | system.tojson(col7) | system.tojson(col8) | system.tojson(col9) | system.tojson(col10) | system.tojson(col11) | system.tojson(col12) | system.tojson(col13) | system.tojson(col14) | system.tojson(col15)
---------------------+---------------------+---------------------+---------------------+---------------------+---------------------+---------------------+---------------------+---------------------+----------------------+----------------------+----------------------+----------------------+----------------------+----------------------
null | null | null | null | null | null | null | null | null | null | null | null | null | null | null
<BLANKLINE>
(1 rows)
<BLANKLINE>
Update the row to have all values defined:
>>> cqlsh('''
... INSERT INTO primitive_type_test (key1, col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15)
... VALUES ('foo', 'bar', 0x0011, '127.0.0.1', 'blarg', '2011-02-03 04:05+0000', 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, 'bleh', -9223372036854775808, 1234.45678, 98712312.1222, 98712312.5252, -2147483648, 2147483648, true)
... ''')
Query the values back as json:
>>> cqlsh_print('''
... SELECT toJson(col1), toJson(col2), toJson(col3), toJson(col4), toJson(col5),
... toJson(col6), toJson(col7), toJson(col8), toJson(col9), toJson(col10),
... toJson(col11),toJson(col12),toJson(col13),toJson(col14),toJson(col15)
... FROM primitive_type_test WHERE key1 = 'foo'
... ''')
<BLANKLINE>
system.tojson(col1) | system.tojson(col2) | system.tojson(col3) | system.tojson(col4) | system.tojson(col5) | system.tojson(col6) | system.tojson(col7) | system.tojson(col8) | system.tojson(col9) | system.tojson(col10) | system.tojson(col11) | system.tojson(col12) | system.tojson(col13) | system.tojson(col14) | system.tojson(col15)
---------------------+---------------------+---------------------+---------------------+----------------------------+----------------------------------------+----------------------------------------+---------------------+----------------------+----------------------+----------------------+----------------------+----------------------+----------------------+----------------------
"bar" | "0x0011" | "127.0.0.1" | "blarg" | "2011..................... | "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f" | "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a" | "bleh" | -9223372036854775808 | 1234.45678 | 9.87123121222E7 | 9.8712312E7 | -2147483648 | 2147483648 | true
<BLANKLINE>
(1 rows)
<BLANKLINE>
"""
run_func_docstring(tester=self, test_func=self.basic_data_types_test)
# yes, it's probably weird to use json for counter changes
    def counters_test(self):
        """
        Add a table with a few counters:
        >>> cqlsh('''
        ... CREATE TABLE my_counters (
        ... key1 text PRIMARY KEY,
        ... col1 counter,
        ... col2 counter,
        ... col3 counter )
        ... ''')
        Add a row with some counter values unset, and one incremented:
        >>> cqlsh("UPDATE my_counters SET col1 = col1+1 WHERE key1 = 'foo'")
        Query the empty/non-empty values back as json:
        >>> cqlsh_print('''
        ... SELECT toJson(col1), toJson(col2), toJson(col3) from my_counters
        ... ''')
        <BLANKLINE>
        system.tojson(col1) | system.tojson(col2) | system.tojson(col3)
        ---------------------+---------------------+---------------------
        1 | null | null
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        """
        # NOTE: the docstring above is not documentation -- run_func_docstring
        # executes it as a doctest against a live cqlsh session, so the doctest
        # text (including the expected table output) IS the test logic and must
        # not be reformatted.  Unset counters are expected to come back as JSON
        # null; the incremented counter as its numeric value.
        run_func_docstring(tester=self, test_func=self.counters_test)
    def complex_data_types_test(self):
        """
        Build some user types and a schema that uses them:
        >>> cqlsh("CREATE TYPE t_todo_item (label text, details text)")
        >>> cqlsh("CREATE TYPE t_todo_list (name text, todo_list list<frozen<t_todo_item>>)")
        >>> cqlsh('''
        ... CREATE TYPE t_kitchen_sink (
        ... item1 ascii,
        ... item2 blob,
        ... item3 inet,
        ... item4 text,
        ... item5 timestamp,
        ... item6 timeuuid,
        ... item7 uuid,
        ... item8 varchar,
        ... item9 bigint,
        ... item10 decimal,
        ... item11 double,
        ... item12 float,
        ... item13 int,
        ... item14 varint,
        ... item15 boolean,
        ... item16 list<int> )
        ... ''')
        >>> cqlsh('''
        ... CREATE TABLE complex_types (
        ... key1 text PRIMARY KEY,
        ... mylist list<text>,
        ... myset set<uuid>,
        ... mymap map<text, int>,
        ... mytuple frozen<tuple<text, int, uuid, boolean>>,
        ... myudt frozen<t_kitchen_sink>,
        ... mytodolists list<frozen<t_todo_list>>,
        ... many_sinks list<frozen<t_kitchen_sink>>,
        ... named_sinks map<text, frozen<t_kitchen_sink>> )
        ... ''')
        Add a row without the complex fields defined:
        >>> cqlsh("INSERT INTO complex_types (key1) values ('foo')")
        Call toJson on the null fields:
        >>> cqlsh_print('''
        ... SELECT toJson(mylist), toJson(myset), toJson(mymap), toJson(mytuple), toJson(myudt), toJson(mytodolists), toJson(many_sinks), toJson(named_sinks)
        ... FROM complex_types where key1 = 'foo'
        ... ''')
        <BLANKLINE>
        system.tojson(mylist) | system.tojson(myset) | system.tojson(mymap) | system.tojson(mytuple) | system.tojson(myudt) | system.tojson(mytodolists) | system.tojson(many_sinks) | system.tojson(named_sinks)
        -----------------------+----------------------+----------------------+------------------------+----------------------+----------------------------+---------------------------+----------------------------
        null | null | null | null | null | null | null | null
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        Define a row with the complex data types:
        >>> cqlsh('''
        ... INSERT INTO complex_types (key1, mylist, myset, mymap, mytuple, myudt, mytodolists, many_sinks, named_sinks)
        ... VALUES (
        ... 'foo',
        ... ['five', 'six', 'seven', 'eight'],
        ... {4b66458a-2a19-41d3-af25-6faef4dea9fe, 080fdd90-ae74-41d6-9883-635625d3b069, 6cd7fab5-eacc-45c3-8414-6ad0177651d6},
        ... {'one' : 1, 'two' : 2, 'three': 3, 'four': 4},
        ... ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, true),
        ... {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011-02-03 04:05+0000', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 98712312.1222, item12: 98712312.5252, item13: -2147483648, item14: 2147483647, item15: false, item16: [1,3,5,7,11,13]},
        ... [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list:[{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}],
        ... [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}],
        ... {'namedsink1':{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]},'namedsink2':{item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},'namedsink3':{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}})
        ... ''')
        Query back the json (one field at a time to make it easier to read) and make sure it looks as it should
        Check that the list is returned ok:
        >>> cqlsh_print("SELECT toJson(mylist) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(mylist)
        -----------------------------------
        ["five", "six", "seven", "eight"]
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(myset) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(myset)
        --------------------------------------------------------------------------------------------------------------------------
        ["080fdd90-ae74-41d6-9883-635625d3b069", "4b66458a-2a19-41d3-af25-6faef4dea9fe", "6cd7fab5-eacc-45c3-8414-6ad0177651d6"]
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(mymap) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(mymap)
        ---------------------------------------------
        {"four": 4, "one": 1, "three": 3, "two": 2}
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(mytuple) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(mytuple)
        -----------------------------------------------------------
        ["hey", 10, "16e69fba-a656-4932-8a01-6782a34505d9", true]
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(myudt) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(myudt)
        --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        {"item1": "heyimascii", "item2": "0x0011", "item3": "127.0.0.1", "item4": "whatev", "item5": "2011...", "item6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "item7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "item8": "bleh", "item9": -9223372036854775808, "item10": 1234.45678, "item11": 9.87123121222E7, "item12": 9.8712312E7, "item13": -2147483648, "item14": 2147483647, "item15": false, "item16": [1, 3, 5, 7, 11, 13]}
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(mytodolists) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(mytodolists)
        ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        [{"name": "stuff to do!", "todo_list": [{"label": "buy groceries", "details": "bread and milk"}, {"label": "pick up car from shop", "details": "$325 due"}, {"label": "call dave", "details": "for some reason"}]}, {"name": "more stuff to do!", "todo_list": [{"label": "buy new car", "details": "the old one is getting expensive"}, {"label": "price insurance", "details": "current cost is $95/mo"}]}]
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(many_sinks) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(many_sinks)
        -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        [{"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012...Z", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 1.00123121222E7, "item12": 4.0012312E7, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1, 1, 2, 3, 5, 8]}, {"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013...Z", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 2.00123121222E7, "item12": 5.0012312E7, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3, 6, 9, 12, 15]}, {"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014...Z", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 3.00123121222E7, "item12": 6.0012312E7, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0, 1, 0, 1, 2, 0]}]
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT toJson(named_sinks) from complex_types where key1 = 'foo'")
        <BLANKLINE>
        system.tojson(named_sinks)
        -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        {"namedsink1": {"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012...Z", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 1.00123121222E7, "item12": 4.0012312E7, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1, 1, 2, 3, 5, 8]}, "namedsink2": {"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013...Z", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 2.00123121222E7, "item12": 5.0012312E7, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3, 6, 9, 12, 15]}, "namedsink3": {"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014...Z", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 3.00123121222E7, "item12": 6.0012312E7, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0, 1, 0, 1, 2, 0]}}
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        """
        # NOTE: the docstring above is executed as a doctest by run_func_docstring;
        # it drives cqlsh directly and compares cqlsh's printed output verbatim,
        # so the doctest text is test logic, not prose.  The "..." runs inside
        # timestamps (e.g. "2011...") are doctest ELLIPSIS placeholders for the
        # timezone-dependent portion of the rendered timestamp.
        run_func_docstring(tester=self, test_func=self.complex_data_types_test)
@since('2.2')
class FromJsonUpdateTests(Tester):
"""
Tests using fromJson within UPDATE statements.
"""
def basic_data_types_test(self):
"""
Create a table with the primitive types:
>>> cqlsh('''
... CREATE TABLE primitive_type_test (
... key1 text PRIMARY KEY,
... col1 ascii,
... col2 blob,
... col3 inet,
... col4 text,
... col5 timestamp,
... col6 timeuuid,
... col7 uuid,
... col8 varchar,
... col9 bigint,
... col10 decimal,
... col11 double,
... col12 float,
... col13 int,
... col14 varint,
... col15 boolean)
... ''')
Create a basic row and update the row using fromJson:
>>> cqlsh('''INSERT INTO primitive_type_test (key1, col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15)
... VALUES ('test', 'bar', 0x0011, '127.0.0.1', 'blarg', '2011-02-03 04:05+0000', 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, 'bleh', -9223372036854775808, 1234.45678, 98712312.1222, 98712312.5252, -2147483648, 2147483648, true)
... ''')
>>> cqlsh('''
... UPDATE primitive_type_test
... SET col1 = fromJson('"bar1"'),
... col2 = fromJson('"0x0012"'),
... col3 = fromJson('"127.0.0.2"'),
... col4 = fromJson('"blarg2"'),
... col5 = fromJson('"2011-02-02 23:05:00.000"'),
... col6 = fromJson('"efe0922a-8638-11e4-b2ac-b4b6763e9d6f"'),
... col7 = fromJson('"05dd0249-25b4-4dec-ba27-54f8730f3c03"'),
... col8 = fromJson('"bleh2"'),
... col9 = fromJson('-8223372036854775808'),
... col10 = fromJson('"2234.45678"'),
... col11 = fromJson('8.87123121222E7'),
... col12 = fromJson('7.8712312E7'),
... col13 = fromJson('-1947483648'),
... col14 = fromJson('"1847483648"'),
... col15 = fromJson('false')
... WHERE key1 = 'test'
... ''')
Query back the row and make sure data is represented correctly:
>>> cqlsh_print('''
... SELECT col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15
... FROM primitive_type_test WHERE key1 = 'test'
... ''')
<BLANKLINE>
col1 | col2 | col3 | col4 | col5 | col6 | col7 | col8 | col9 | col10 | col11 | col12 | col13 | col14 | col15
------+--------+-----------+--------+--------------------------+--------------------------------------+--------------------------------------+-------+----------------------+------------+------------+------------+-------------+------------+-------
bar1 | 0x0012 | 127.0.0.2 | blarg2 | 2011.....................| efe0922a-8638-11e4-b2ac-b4b6763e9d6f | 05dd0249-25b4-4dec-ba27-54f8730f3c03 | bleh2 | -8223372036854775808 | 2234.45678 | 8.8712e+07 | 7.8712e+07 | -1947483648 | 1847483648 | False
<BLANKLINE>
(1 rows)
<BLANKLINE>
"""
run_func_docstring(tester=self, test_func=self.basic_data_types_test)
def complex_data_types_test(self):
""""
UDT and schema setup:
>>> cqlsh('''
... CREATE TYPE t_todo_item (
... label text,
... details text)
... ''')
>>> cqlsh('''
... CREATE TYPE t_todo_list (
... name text,
... todo_list list<frozen<t_todo_item>>)
... ''')
>>> cqlsh('''
... CREATE TYPE t_kitchen_sink (
... item1 ascii,
... item2 blob,
... item3 inet,
... item4 text,
... item5 timestamp,
... item6 timeuuid,
... item7 uuid,
... item8 varchar,
... item9 bigint,
... item10 decimal,
... item11 double,
... item12 float,
... item13 int,
... item14 varint,
... item15 boolean,
... item16 list<int> )
... ''')
>>> cqlsh('''
... CREATE TABLE complex_types (
... key1 text PRIMARY KEY,
... mylist list<text>,
... myset set<uuid>,
... mymap map<text, int>,
... mytuple frozen<tuple<text, int, uuid, boolean>>,
... myudt frozen<t_kitchen_sink>,
... mytodolists list<frozen<t_todo_list>>,
... many_sinks list<frozen<t_kitchen_sink>>,
... named_sinks map<text, frozen<t_kitchen_sink>> )
... ''')
Insert a row using plain cql values, then update the complex types using fromJson:
>>> cqlsh('''
... INSERT INTO complex_types (key1, mylist, myset, mymap, mytuple, myudt, mytodolists, many_sinks, named_sinks)
... VALUES (
... 'row1',
... ['five', 'six', 'seven', 'eight'],
... {4b66458a-2a19-41d3-af25-6faef4dea9fe, 080fdd90-ae74-41d6-9883-635625d3b069, 6cd7fab5-eacc-45c3-8414-6ad0177651d6},
... {'one' : 1, 'two' : 2, 'three': 3, 'four': 4},
... ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, true),
... {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011-02-03 04:05+0000', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 98712312.1222, item12: 98712312.5252, item13: -2147483648, item14: 2147483647, item15: false, item16: [1,3,5,7,11,13]},
... [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list:[{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}],
... [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}],
... {'namedsink1':{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]},'namedsink2':{item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},'namedsink3':{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}})
... ''')
>>> cqlsh('''
... UPDATE complex_types
... SET mylist = fromJson('["nine", "ten", "eleven"]'),
... myset = fromJson('["74887ce9-cea2-4d63-b874-cbe0a376bd3b", "3819f267-7075-4261-a33a-72e1e5a851d9"]'),
... mymap = fromJson('{"five" : 5, "six" : 6, "seven": 7}'),
... mytuple = fromJson('["whah?", 437, "e2058138-9a60-4f72-94f1-f48a21d59ff2", false]'),
... myudt = fromJson('{"item1": "imitem1", "item2": "0x0014", "item3": "127.0.1.3", "item4": "asdf", "item5": "2009-02-01 04:05+0000", "item6": "25ba53f2-8645-11e4-afbc-b4b6763e9d6f", "item7": "97e053fb-225d-400a-9d4d-6b989e7d0cd9", "item8": "fdsa", "item9": -5223372036854775808, "item10": 2134.45678, "item11": 78712312.1222, "item12": 66712312.5252, "item13": -1347483648, "item14": 1097483647, "item15": true, "item16": [13,11,7,5,3,2,1]}'),
... mytodolists = fromJson('[{"name": "a simple todo list", "todo_list": [{"label": "go to the store", "details": "need bread and milk"}, {"label": "drop off rental car", "details": "$180 due"}, {"label": "call bob", "details": "left a message"}]}, {"name": "a second todo list", "todo_list":[{"label": "buy a race car", "details": "need to go faster"}, {"label": "go running", "details": "just because"}]}]'),
... many_sinks = fromJson('[{"item1": "qwerty", "item2": "0x0212", "item3": "127.5.5.5", "item4": "whatever0", "item5": "1999-02-03 04:05+0000", "item6": "3137ee20-8649-11e4-853d-b4b6763e9d6f", "item7": "034ca793-7147-45f5-bc60-2d5e76cc735d", "item8": "rewq", "item9": -6623372036854771111, "item10": 321.45678, "item11": 9992312.1222, "item12": 33012312.5252, "item13": -1547483648, "item14": 1847483648, "item15": false, "item16": [0,0,5,5,10,20]}, {"item1": "ljsdf", "item2": "0x2131", "item3": "10.10.1.5", "item4": "whatever1", "item5": "1987-02-08 02:05+0000", "item6": "a7a7c3d2-8649-11e4-88ff-b4b6763e9d6f", "item7": "9dfc88cd-ab10-4cf8-8046-344c905558c4", "item8": "vbnm", "item9": -1933372036854775808, "item10": 5534.12321, "item11": 19912312.1222, "item12": 49912312.5252, "item13": -997483648, "item14": 1007483648, "item15": true, "item16": [6,9,12,15,18,21]},{"item1": "hjkl", "item2": "0x2006", "item3": "127.3.3.1", "item4": "whatever2", "item5": "1996-04-01 04:05+0000", "item6": "0ed3a30a-864a-11e4-a596-b4b6763e9d6f", "item7": "49a74392-8295-4370-ba49-2ea57aaa5107", "item8": "nanananana", "item9": -1859372036854775808, "item10": 5455.55555, "item11": 29992312.1222, "item12": 59912312.5252, "item13": 1327483647, "item14": 1017483648, "item15": false, "item16": [1,0,5,10,1000]}]'),
... named_sinks = fromJson('{"namedsink5000":{"item1": "aaaaaa", "item2": "0x5555", "item3": "127.1.2.3", "item4": "whatev10", "item5": "1999-02-01 01:05+0000", "item6": "c2c5dc0c-864a-11e4-adc6-b4b6763e9d6f", "item7": "2d630a5d-a9e1-4e4b-b551-fb953e16c1f8", "item8": "8s8s8s8aaaa", "item9": -3323372036854771111, "item10": 1221.45678, "item11": 8812312.1222, "item12": 20112312.5252, "item13": -757483648, "item14": 1017483648, "item15": false, "item16": [6,7,8,9,8,7]},"namedsink5001":{"item1": "bbbbbb", "item2": "0x000092", "item3": "192.168.0.3", "item4": "whatev20", "item5": "2003-08-03 01:05+0000", "item6": "3e4c2a16-864b-11e4-bb20-b4b6763e9d6f", "item7": "e404dd5c-9dc7-4b1d-bce9-eb85bb72297c", "item8": "mariachi", "item9": -2211372036854775808, "item10": 34.12321, "item11": 13132312.1222, "item12": 39912312.5252, "item13": -987483648, "item14": 1047483648, "item15": true, "item16": [500]},"namedsink5002":{"item1": "ccccccccc", "item2": "0x1002", "item3": "192.168.100.5", "item4": "whatev30", "item5": "2017-012-03 04:05+0000", "item6": "65badb36-8652-11e4-a5b9-b4b6763e9d6f", "item7": "aa371f6e-8044-4d11-8aec-2e698e55bb87", "item8": "abcdef", "item9": -1144372036854775808, "item10": 4321.55555, "item11": 21212312.1222, "item12": 33912312.5252, "item13": 1147483647, "item14": 1047483648, "item15": false, "item16": [100,200,500,300]}}')
... WHERE key1 = 'row1'
... ''')
Query back the fields one by one and make sure they match the updates:
>>> cqlsh_print("SELECT mylist from complex_types where key1 = 'row1'")
<BLANKLINE>
mylist
---------------------------
['nine', 'ten', 'eleven']
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT myset from complex_types where key1 = 'row1'")
<BLANKLINE>
myset
------------------------------------------------------------------------------
{3819f267-7075-4261-a33a-72e1e5a851d9, 74887ce9-cea2-4d63-b874-cbe0a376bd3b}
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT mymap from complex_types where key1 = 'row1'")
<BLANKLINE>
mymap
-----------------------------------
{'five': 5, 'seven': 7, 'six': 6}
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT mytuple from complex_types where key1 = 'row1'")
<BLANKLINE>
mytuple
-------------------------------------------------------------
('whah?', 437, e2058138-9a60-4f72-94f1-f48a21d59ff2, False)
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT myudt from complex_types where key1 = 'row1'")
<BLANKLINE>
myudt
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
{item1: 'imitem1', item2: 0x0014, item3: '127.0.1.3', item4: 'asdf', item5: '2009...', item6: 25ba53f2-8645-11e4-afbc-b4b6763e9d6f, item7: 97e053fb-225d-400a-9d4d-6b989e7d0cd9, item8: 'fdsa', item9: -5223372036854775808, item10: 2134.45678, item11: 7.8712e+07, item12: 6.6712e+07, item13: -1347483648, item14: 1097483647, item15: True, item16: [13, 11, 7, 5, 3, 2, 1]}
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT mytodolists from complex_types where key1 = 'row1'")
<BLANKLINE>
mytodolists
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[{name: 'a simple todo list', todo_list: [{label: 'go to the store', details: 'need bread and milk'}, {label: 'drop off rental car', details: '$180 due'}, {label: 'call bob', details: 'left a message'}]}, {name: 'a second todo list', todo_list: [{label: 'buy a race car', details: 'need to go faster'}, {label: 'go running', details: 'just because'}]}]
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT many_sinks from complex_types where key1 = 'row1'")
<BLANKLINE>
many_sinks
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
[{item1: 'qwerty', item2: 0x0212, item3: '127.5.5.5', item4: 'whatever0', item5: '1999...', item6: 3137ee20-8649-11e4-853d-b4b6763e9d6f, item7: 034ca793-7147-45f5-bc60-2d5e76cc735d, item8: 'rewq', item9: -6623372036854771111, item10: 321.45678, item11: 9.9923e+06, item12: 3.3012e+07, item13: -1547483648, item14: 1847483648, item15: False, item16: [0, 0, 5, 5, 10, 20]}, {item1: 'ljsdf', item2: 0x2131, item3: '10.10.1.5', item4: 'whatever1', item5: '1987...', item6: a7a7c3d2-8649-11e4-88ff-b4b6763e9d6f, item7: 9dfc88cd-ab10-4cf8-8046-344c905558c4, item8: 'vbnm', item9: -1933372036854775808, item10: 5534.12321, item11: 1.9912e+07, item12: 4.9912e+07, item13: -997483648, item14: 1007483648, item15: True, item16: [6, 9, 12, 15, 18, 21]}, {item1: 'hjkl', item2: 0x2006, item3: '127.3.3.1', item4: 'whatever2', item5: '1996...', item6: 0ed3a30a-864a-11e4-a596-b4b6763e9d6f, item7: 49a74392-8295-4370-ba49-2ea57aaa5107, item8: 'nanananana', item9: -1859372036854775808, item10: 5455.55555, item11: 2.9992e+07, item12: 5.9912e+07, item13: 1327483647, item14: 1017483648, item15: False, item16: [1, 0, 5, 10, 1000]}]
<BLANKLINE>
(1 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT named_sinks from complex_types where key1 = 'row1'")
<BLANKLINE>
named_sinks
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
{'namedsink5000': {item1: 'aaaaaa', item2: 0x5555, item3: '127.1.2.3', item4: 'whatev10', item5: '1999...', item6: c2c5dc0c-864a-11e4-adc6-b4b6763e9d6f, item7: 2d630a5d-a9e1-4e4b-b551-fb953e16c1f8, item8: '8s8s8s8aaaa', item9: -3323372036854771111, item10: 1221.45678, item11: 8.8123e+06, item12: 2.0112e+07, item13: -757483648, item14: 1017483648, item15: False, item16: [6, 7, 8, 9, 8, 7]}, 'namedsink5001': {item1: 'bbbbbb', item2: 0x000092, item3: '192.168.0.3', item4: 'whatev20', item5: '2003...', item6: 3e4c2a16-864b-11e4-bb20-b4b6763e9d6f, item7: e404dd5c-9dc7-4b1d-bce9-eb85bb72297c, item8: 'mariachi', item9: -2211372036854775808, item10: 34.12321, item11: 1.3132e+07, item12: 3.9912e+07, item13: -987483648, item14: 1047483648, item15: True, item16: [500]}, 'namedsink5002': {item1: 'ccccccccc', item2: 0x1002, item3: '192.168.100.5', item4: 'whatev30', item5: '2017...', item6: 65badb36-8652-11e4-a5b9-b4b6763e9d6f, item7: aa371f6e-8044-4d11-8aec-2e698e55bb87, item8: 'abcdef', item9: -1144372036854775808, item10: 4321.55555, item11: 2.1212e+07, item12: 3.3912e+07, item13: 1147483647, item14: 1047483648, item15: False, item16: [100, 200, 500, 300]}}
<BLANKLINE>
(1 rows)
<BLANKLINE>
"""
run_func_docstring(tester=self, test_func=self.complex_data_types_test)
def collection_update_test(self):
    # NOTE: the docstring below is an executable doctest. run_func_docstring()
    # extracts it and runs every ">>> " example against a live node's cqlsh,
    # comparing the expected output verbatim (<BLANKLINE> stands for an empty
    # output line), so the example and output text must not be edited casually.
    """
    Setup schema, add a row:

    >>> cqlsh('''
    ... CREATE TABLE basic_collections (
    ... key1 text PRIMARY KEY,
    ... mylist list<text>,
    ... myset set<text>,
    ... mymap map<text, int>)
    ... ''')
    >>> cqlsh("INSERT INTO basic_collections (key1) values ('row1')")

    Issue some updates:

    >>> cqlsh('''
    ... UPDATE basic_collections
    ... SET mylist = fromJson('["c"]'),
    ... myset = fromJson('["f"]'),
    ... mymap = fromJson('{"one": 1}')
    ... WHERE key1 = 'row1'
    ... ''')
    >>> cqlsh('''
    ... UPDATE basic_collections
    ... SET mylist = fromJson('["a","b"]') + mylist,
    ... myset = myset + fromJson('["d","e"]'),
    ... mymap['two'] = fromJson('2')
    ... WHERE key1 = 'row1'
    ... ''')

    Query the row and make sure it's correct:

    >>> cqlsh_print("SELECT * from basic_collections where key1 = 'row1'")
    <BLANKLINE>
    key1 | mylist | mymap | myset
    ------+-----------------+----------------------+-----------------
    row1 | ['a', 'b', 'c'] | {'one': 1, 'two': 2} | {'d', 'e', 'f'}
    <BLANKLINE>
    (1 rows)
    <BLANKLINE>

    Some more updates of differing types:

    >>> cqlsh('''
    ... UPDATE basic_collections
    ... SET mylist = mylist + fromJson('["d","e"]'),
    ... myset = myset + fromJson('["g"]'),
    ... mymap['three'] = fromJson('3')
    ... WHERE key1 = 'row1'
    ... ''')
    >>> cqlsh('''
    ... UPDATE basic_collections
    ... SET mylist = mylist - fromJson('["b"]'),
    ... myset = myset - fromJson('["d"]'),
    ... mymap['three'] = fromJson('4')
    ... WHERE key1 = 'row1'
    ... ''')

    Query final state and check it:

    >>> cqlsh_print("SELECT * from basic_collections where key1 = 'row1'")
    <BLANKLINE>
    key1 | mylist | mymap | myset
    ------+----------------------+----------------------------------+-----------------
    row1 | ['a', 'c', 'd', 'e'] | {'one': 1, 'three': 4, 'two': 2} | {'e', 'f', 'g'}
    <BLANKLINE>
    (1 rows)
    <BLANKLINE>
    """
    run_func_docstring(tester=self, test_func=self.collection_update_test)
@since('2.2')
class FromJsonSelectTests(Tester):
    """
    Tests using fromJson in conjunction with a SELECT statement.
    """
    # Each method docstring below is an executable doctest run through
    # run_func_docstring(); cqlsh output is compared against the expected
    # text verbatim (<BLANKLINE> marks an empty output line).

    def selecting_pkey_as_json_test(self):
        # Verifies that fromJson() can be used to build a UDT primary-key
        # value in a WHERE clause.
        """
        Schema setup:

        >>> cqlsh('''
        ... CREATE TYPE t_person_name (
        ... first text,
        ... middle text,
        ... last text)
        ... ''')
        >>> cqlsh('''
        ... CREATE TABLE person_info (
        ... name frozen<t_person_name> PRIMARY KEY,
        ... info text )
        ... ''')

        Add a row:

        >>> cqlsh("INSERT INTO person_info (name, info) VALUES ({first: 'test', middle: 'guy', last: 'jones'}, 'enjoys bacon')")

        Query the row back on the primary key with fromJson:

        >>> cqlsh_print('''
        ... SELECT * FROM person_info WHERE name = fromJson('{"first":"test", "middle":"guy", "last":"jones"}')
        ... ''')
        <BLANKLINE>
        name | info
        -----------------------------------------------+--------------
        {first: 'test', middle: 'guy', last: 'jones'} | enjoys bacon
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        """
        run_func_docstring(tester=self, test_func=self.selecting_pkey_as_json_test)

    def select_using_secondary_index_test(self):
        # Verifies that fromJson() works as the comparison value when the
        # lookup goes through a secondary index rather than the primary key.
        """
        Schema setup and secondary index:

        >>> cqlsh('''
        ... CREATE TYPE t_person_name (
        ... first text,
        ... middle text,
        ... last text )
        ... ''')
        >>> cqlsh('''
        ... CREATE TABLE person_likes (
        ... id uuid PRIMARY KEY,
        ... name frozen<t_person_name>,
        ... like text )
        ... ''')
        >>> cqlsh("CREATE INDEX person_likes_name ON person_likes (name)")

        Add a row:

        >>> cqlsh("INSERT INTO person_likes (id, name, like) VALUES (99b81888-e889-44aa-a511-cbd451c8a024, {first:'test', middle: 'guy', last:'jones'}, 'art')")

        Query the row back using fromJson with the secondary index:

        >>> cqlsh_print('''
        ... SELECT * from person_likes where name = fromJson('{"first":"test", "middle":"guy", "last":"jones"}')
        ... ''')
        <BLANKLINE>
        id | like | name
        --------------------------------------+------+-----------------------------------------------
        99b81888-e889-44aa-a511-cbd451c8a024 | art | {first: 'test', middle: 'guy', last: 'jones'}
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        """
        run_func_docstring(tester=self, test_func=self.select_using_secondary_index_test)
@since('2.2')
class FromJsonInsertTests(Tester):
    """
    Tests using fromJson within INSERT statements.
    """
    # Each method docstring below is an executable doctest run through
    # run_func_docstring(); cqlsh output is compared against the expected
    # text verbatim (<BLANKLINE> marks an empty output line).

    def basic_data_types_test(self):
        # Inserts one row where every primitive-typed column value is built
        # with fromJson(), then checks both the native and toJson() views.
        """
        Create a table with the primitive types:

        >>> cqlsh('''
        ... CREATE TABLE primitive_type_test (
        ... key1 text PRIMARY KEY,
        ... col1 ascii,
        ... col2 blob,
        ... col3 inet,
        ... col4 text,
        ... col5 timestamp,
        ... col6 timeuuid,
        ... col7 uuid,
        ... col8 varchar,
        ... col9 bigint,
        ... col10 decimal,
        ... col11 double,
        ... col12 float,
        ... col13 int,
        ... col14 varint,
        ... col15 boolean)
        ... ''')

        Create a full row using fromJson for each value:

        >>> cqlsh('''
        ... INSERT INTO primitive_type_test (key1, col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15)
        ... VALUES (fromJson('"test"'), fromJson('"bar"'), fromJson('"0x0011"'), fromJson('"127.0.0.1"'), fromJson('"blarg"'), fromJson('"2011-02-02 21:05:00.000"'), fromJson('"0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f"'), fromJson('"bdf5e8ac-a75e-4321-9ac8-938fc9576c4a"'), fromJson('"bleh"'), fromJson('-9223372036854775808'), fromJson('"1234.45678"'), fromJson('9.87123121222E7'), fromJson('9.8712312E7'), fromJson('-2147483648'), fromJson('"2147483648"'), fromJson('true'))
        ... ''')

        Query back the row and make sure data is represented correctly:

        >>> cqlsh_print('''
        ... SELECT col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15
        ... FROM primitive_type_test WHERE key1 = 'test'
        ... ''')
        <BLANKLINE>
        col1 | col2 | col3 | col4 | col5 | col6 | col7 | col8 | col9 | col10 | col11 | col12 | col13 | col14 | col15
        ------+--------+-----------+-------+--------------------------+--------------------------------------+--------------------------------------+------+----------------------+------------+------------+------------+-------------+------------+-------
        bar | 0x0011 | 127.0.0.1 | blarg | 2011.....................| 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f | bdf5e8ac-a75e-4321-9ac8-938fc9576c4a | bleh | -9223372036854775808 | 1234.45678 | 9.8712e+07 | 9.8712e+07 | -2147483648 | 2147483648 | True
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>

        Query row back as json to see if the json representation queried from the DB matches the json that was used on insert:

        >>> cqlsh_print('''
        ... SELECT toJson(col1), toJson(col2), toJson(col3), toJson(col4), toJson(col5),
        ... toJson(col6), toJson(col7), toJson(col8), toJson(col9), toJson(col10),
        ... toJson(col11),toJson(col12),toJson(col13),toJson(col14),toJson(col15)
        ... FROM primitive_type_test WHERE key1 = 'test'
        ... ''')
        <BLANKLINE>
        system.tojson(col1) | system.tojson(col2) | system.tojson(col3) | system.tojson(col4) | system.tojson(col5) | system.tojson(col6) | system.tojson(col7) | system.tojson(col8) | system.tojson(col9) | system.tojson(col10) | system.tojson(col11) | system.tojson(col12) | system.tojson(col13) | system.tojson(col14) | system.tojson(col15)
        ---------------------+---------------------+---------------------+---------------------+----------------------------+----------------------------------------+----------------------------------------+---------------------+----------------------+----------------------+----------------------+----------------------+----------------------+----------------------+----------------------
        "bar" | "0x0011" | "127.0.0.1" | "blarg" | "2011..................... | "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f" | "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a" | "bleh" | -9223372036854775808 | 1234.45678 | 9.87123121222E7 | 9.8712312E7 | -2147483648 | 2147483648 | true
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        """
        run_func_docstring(tester=self, test_func=self.basic_data_types_test)

    def complex_data_types_test(self):
        # Inserts one row with plain CQL literals ('row1') and an equivalent
        # row built entirely from fromJson() values ('row2'), then compares
        # them column by column; covers collections, tuples, and nested UDTs.
        """
        Build some user types and a schema that uses them:

        >>> cqlsh("CREATE TYPE t_todo_item (label text, details text)")
        >>> cqlsh("CREATE TYPE t_todo_list (name text, todo_list list<frozen<t_todo_item>>)")
        >>> cqlsh('''
        ... CREATE TYPE t_kitchen_sink (
        ... item1 ascii,
        ... item2 blob,
        ... item3 inet,
        ... item4 text,
        ... item5 timestamp,
        ... item6 timeuuid,
        ... item7 uuid,
        ... item8 varchar,
        ... item9 bigint,
        ... item10 decimal,
        ... item11 double,
        ... item12 float,
        ... item13 int,
        ... item14 varint,
        ... item15 boolean,
        ... item16 list<int> )
        ... ''')
        >>> cqlsh('''
        ... CREATE TABLE complex_types (
        ... key1 text PRIMARY KEY,
        ... mylist list<text>,
        ... myset set<uuid>,
        ... mymap map<text, int>,
        ... mytuple frozen<tuple<text, int, uuid, boolean>>,
        ... myudt frozen<t_kitchen_sink>,
        ... mytodolists list<frozen<t_todo_list>>,
        ... many_sinks list<frozen<t_kitchen_sink>>,
        ... named_sinks map<text, frozen<t_kitchen_sink>> )
        ... ''')
        >>> cqlsh('''
        ... INSERT INTO complex_types (key1, mylist, myset, mymap, mytuple, myudt, mytodolists, many_sinks, named_sinks)
        ... VALUES (
        ... 'row1',
        ... ['five', 'six', 'seven', 'eight'],
        ... {4b66458a-2a19-41d3-af25-6faef4dea9fe, 080fdd90-ae74-41d6-9883-635625d3b069, 6cd7fab5-eacc-45c3-8414-6ad0177651d6},
        ... {'one' : 1, 'two' : 2, 'three': 3, 'four': 4},
        ... ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, true),
        ... {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011-02-03 04:05+0000', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 98712312.1222, item12: 98712312.5252, item13: -2147483648, item14: 2147483647, item15: false, item16: [1,3,5,7,11,13]},
        ... [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list:[{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}],
        ... [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}],
        ... {'namedsink1':{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]},'namedsink2':{item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},'namedsink3':{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}})
        ... ''')

        Add a row:

        >>> cqlsh('''
        ... INSERT INTO complex_types (key1, mylist, myset, mymap, mytuple, myudt, mytodolists, many_sinks, named_sinks)
        ... VALUES (
        ... fromJson('"row2"'),
        ... fromJson('["five", "six", "seven", "eight"]'),
        ... fromJson('["4b66458a-2a19-41d3-af25-6faef4dea9fe", "080fdd90-ae74-41d6-9883-635625d3b069", "6cd7fab5-eacc-45c3-8414-6ad0177651d6"]'),
        ... fromJson('{"one" : 1, "two" : 2, "three": 3, "four": 4}'),
        ... fromJson('["hey", 10, "16e69fba-a656-4932-8a01-6782a34505d9", true]'),
        ... fromJson('{"item1": "heyimascii", "item2": "0x0011", "item3": "127.0.0.1", "item4": "whatev", "item5": "2011-02-03 04:05+0000", "item6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "item7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "item8": "bleh", "item9": -9223372036854775808, "item10": 1234.45678, "item11": 98712312.1222, "item12": 98712312.5252, "item13": -2147483648, "item14": 2147483647, "item15": false, "item16": [1,3,5,7,11,13]}'),
        ... fromJson('[{"name": "stuff to do!", "todo_list": [{"label": "buy groceries", "details": "bread and milk"}, {"label": "pick up car from shop", "details": "$325 due"}, {"label": "call dave", "details": "for some reason"}]}, {"name": "more stuff to do!", "todo_list":[{"label": "buy new car", "details": "the old one is getting expensive"}, {"label": "price insurance", "details": "current cost is $95/mo"}]}]'),
        ... fromJson('[{"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012-02-03 04:05+0000", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 10012312.1222, "item12": 40012312.5252, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1,1,2,3,5,8]}, {"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013-02-03 04:05+0000", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 20012312.1222, "item12": 50012312.5252, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3,6,9,12,15]},{"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014-02-03 04:05+0000", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 30012312.1222, "item12": 60012312.5252, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0,1,0,1,2,0]}]'),
        ... fromJson('{"namedsink1":{"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012-02-03 04:05+0000", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 10012312.1222, "item12": 40012312.5252, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1,1,2,3,5,8]},"namedsink2":{"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013-02-03 04:05+0000", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 20012312.1222, "item12": 50012312.5252, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3,6,9,12,15]},"namedsink3":{"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014-02-03 04:05+0000", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 30012312.1222, "item12": 60012312.5252, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0,1,0,1,2,0]}}'))
        ... ''')

        Query back the normal cql inserted row and the fromJson inserted row, and compare to make sure they match (do this one field at a time for easier reading):

        >>> cqlsh_print("SELECT key1, mylist from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | mylist
        ------+-----------------------------------
        row1 | ['five', 'six', 'seven', 'eight']
        row2 | ['five', 'six', 'seven', 'eight']
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, myset from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | myset
        ------+--------------------------------------------------------------------------------------------------------------------
        row1 | {080fdd90-ae74-41d6-9883-635625d3b069, 4b66458a-2a19-41d3-af25-6faef4dea9fe, 6cd7fab5-eacc-45c3-8414-6ad0177651d6}
        row2 | {080fdd90-ae74-41d6-9883-635625d3b069, 4b66458a-2a19-41d3-af25-6faef4dea9fe, 6cd7fab5-eacc-45c3-8414-6ad0177651d6}
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, mymap from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | mymap
        ------+---------------------------------------------
        row1 | {'four': 4, 'one': 1, 'three': 3, 'two': 2}
        row2 | {'four': 4, 'one': 1, 'three': 3, 'two': 2}
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, mytuple from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | mytuple
        ------+---------------------------------------------------------
        row1 | ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, True)
        row2 | ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, True)
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, myudt from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | myudt
        ------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        row1 | {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011...', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 9.8712e+07, item12: 9.8712e+07, item13: -2147483648, item14: 2147483647, item15: False, item16: [1, 3, 5, 7, 11, 13]}
        row2 | {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011...', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 9.8712e+07, item12: 9.8712e+07, item13: -2147483648, item14: 2147483647, item15: False, item16: [1, 3, 5, 7, 11, 13]}
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, mytodolists from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | mytodolists
        ------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        row1 | [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list: [{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}]
        row2 | [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list: [{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}]
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, many_sinks from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | many_sinks
        ------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        row1 | [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}]
        row2 | [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}]
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        >>> cqlsh_print("SELECT key1, named_sinks from complex_types where key1 in ('row1', 'row2')")
        <BLANKLINE>
        key1 | named_sinks
        ------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
        row1 | {'namedsink1': {item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, 'namedsink2': {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, 'namedsink3': {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}}
        row2 | {'namedsink1': {item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, 'namedsink2': {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, 'namedsink3': {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}}
        <BLANKLINE>
        (2 rows)
        <BLANKLINE>
        """
        run_func_docstring(tester=self, test_func=self.complex_data_types_test)
@since('2.2')
class FromJsonDeleteTests(Tester):
    """
    Tests using fromJson within DELETE statements.
    """
    # Each method docstring below is an executable doctest run through
    # run_func_docstring(); cqlsh output is compared against the expected
    # text verbatim (<BLANKLINE> marks an empty output line).

    def delete_using_pkey_json_test(self):
        # Deletes a row whose UDT primary key is supplied via fromJson(),
        # then confirms the table is empty (the trailing "Warnings" block is
        # part of the expected output of the unrestricted COUNT query).
        """
        Schema setup:

        >>> cqlsh('''
        ... CREATE TYPE t_person_name (
        ... first text,
        ... middle text,
        ... last text)
        ... ''')
        >>> cqlsh('''
        ... CREATE TABLE person_info (
        ... name frozen<t_person_name> PRIMARY KEY,
        ... info text)
        ... ''')

        Add a row:

        >>> cqlsh("INSERT INTO person_info (name, info) VALUES ({first: 'test', middle: 'guy', last: 'jones'}, 'enjoys bacon')")

        Make sure the row is there:

        >>> cqlsh_print('''
        ... SELECT * FROM person_info WHERE name = fromJson('{"first":"test", "middle":"guy", "last":"jones"}')
        ... ''')
        <BLANKLINE>
        name | info
        -----------------------------------------------+--------------
        {first: 'test', middle: 'guy', last: 'jones'} | enjoys bacon
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>

        Delete the row using a fromJson clause:

        >>> cqlsh('''
        ... DELETE FROM person_info WHERE name = fromJson('{"first":"test", "middle":"guy", "last":"jones"}')
        ... ''')

        Make sure the row is gone:

        >>> cqlsh_print("SELECT COUNT(*) from person_info")
        <BLANKLINE>
        count
        -------
        0
        <BLANKLINE>
        (1 rows)
        <BLANKLINE>
        Warnings :
        Aggregation query used without partition key
        <BLANKLINE>
        <BLANKLINE>
        """
        run_func_docstring(tester=self, test_func=self.delete_using_pkey_json_test)
@since('2.2')
class JsonFullRowInsertSelect(Tester):
"""
Tests for creating full rows from json documents, selecting full rows back as json documents, and related functionality.
"""
def simple_schema_test(self):
    # Executable doctest (run via run_func_docstring): inserts full rows both
    # with a plain INSERT and with INSERT ... JSON, then compares the two via
    # SELECT JSON, a partial JSON projection, and a plain SELECT.
    """
    Create schema:

    >>> cqlsh('''
    ... CREATE TABLE primitive_type_test (
    ... key1 text PRIMARY KEY,
    ... col1 ascii,
    ... col2 blob,
    ... col3 inet,
    ... col4 text,
    ... col5 timestamp,
    ... col6 timeuuid,
    ... col7 uuid,
    ... col8 varchar,
    ... col9 bigint,
    ... col10 decimal,
    ... col11 double,
    ... col12 float,
    ... col13 int,
    ... col14 varint,
    ... col15 boolean)
    ... ''')

    Add two rows with all null values, create the first row using a regular INSERT statement, and the second row using JSON. Different key for each row:

    >>> cqlsh("INSERT INTO primitive_type_test (key1) values ('foo')")
    >>> cqlsh('''
    ... INSERT INTO primitive_type_test JSON '{"key1":"bar"}'
    ... ''')

    Query back both rows as JSON:

    >>> cqlsh_print("SELECT JSON * FROM primitive_type_test")
    <BLANKLINE>
    [json]
    -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
    {"key1": "bar", "col1": null, "col10": null, "col11": null, "col12": null, "col13": null, "col14": null, "col15": null, "col2": null, "col3": null, "col4": null, "col5": null, "col6": null, "col7": null, "col8": null, "col9": null}
    {"key1": "foo", "col1": null, "col10": null, "col11": null, "col12": null, "col13": null, "col14": null, "col15": null, "col2": null, "col3": null, "col4": null, "col5": null, "col6": null, "col7": null, "col8": null, "col9": null}
    <BLANKLINE>
    (2 rows)
    <BLANKLINE>

    Query back both rows as non-JSON to be sure they look ok there:

    >>> cqlsh_print("SELECT * FROM primitive_type_test")
    <BLANKLINE>
    key1 | col1 | col10 | col11 | col12 | col13 | col14 | col15 | col2 | col3 | col4 | col5 | col6 | col7 | col8 | col9
    ------+------+-------+-------+-------+-------+-------+-------+------+------+------+------+------+------+------+------
    bar | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null
    foo | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null
    <BLANKLINE>
    (2 rows)
    <BLANKLINE>

    Use a plain insert to update one row, and a JSON insert to update the other:

    >>> cqlsh('''
    ... INSERT INTO primitive_type_test (key1, col1, col2, col3, col4, col5, col6, col7, col8, col9, col10, col11, col12, col13, col14, col15)
    ... VALUES ('foo', 'bar', 0x0011, '127.0.0.1', 'blarg', '2011-02-03 04:05+0000', 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, 'bleh', -9223372036854775808, 1234.45678, 98712312.1222, 98712312.5252, -2147483648, 2147483648, true)
    ... ''')
    >>> cqlsh('''
    ... INSERT INTO primitive_type_test JSON '{"key1": "bar", "col1": "bar", "col2": "0x0011", "col3": "127.0.0.1", "col4": "blarg", "col5": "2011-02-02 21:05:00.000", "col6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "col7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "col8": "bleh", "col9": -9223372036854775808, "col10": "1234.45678", "col11":9.87123121222E7, "col12": 9.87123121222E7, "col13": -2147483648, "col14": 2147483648, "col15": true}'
    ... ''')

    Query back both rows as JSON:

    >>> cqlsh_print("SELECT JSON * FROM primitive_type_test")
    <BLANKLINE>
    [json]
    ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
    {"key1": "bar", "col1": "bar", "col10": 1234.45678, "col11": 9.87123121222E7, "col12": 9.8712312E7, "col13": -2147483648, "col14": 2147483648, "col15": true, "col2": "0x0011", "col3": "127.0.0.1", "col4": "blarg", "col5": "2011...Z", "col6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "col7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "col8": "bleh", "col9": -9223372036854775808}
    {"key1": "foo", "col1": "bar", "col10": 1234.45678, "col11": 9.87123121222E7, "col12": 9.8712312E7, "col13": -2147483648, "col14": 2147483648, "col15": true, "col2": "0x0011", "col3": "127.0.0.1", "col4": "blarg", "col5": "2011...Z", "col6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "col7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "col8": "bleh", "col9": -9223372036854775808}
    <BLANKLINE>
    (2 rows)
    <BLANKLINE>

    Query back both rows, but with only some JSON fields:

    >>> cqlsh_print("SELECT JSON col15, col1, col3, col13, col11, col2, col4 FROM primitive_type_test WHERE key1 in ('foo', 'bar')")
    <BLANKLINE>
    [json]
    ----------------------------------------------------------------------------------------------------------------------------------------
    {"col15": true, "col1": "bar", "col3": "127.0.0.1", "col13": -2147483648, "col11": 9.87123121222E7, "col2": "0x0011", "col4": "blarg"}
    {"col15": true, "col1": "bar", "col3": "127.0.0.1", "col13": -2147483648, "col11": 9.87123121222E7, "col2": "0x0011", "col4": "blarg"}
    <BLANKLINE>
    (2 rows)
    <BLANKLINE>

    Query rows normally and make sure they look ok there too:

    >>> cqlsh_print("SELECT * FROM primitive_type_test")
    <BLANKLINE>
    key1 | col1 | col10 | col11 | col12 | col13 | col14 | col15 | col2 | col3 | col4 | col5 | col6 | col7 | col8 | col9
    ------+------+------------+------------+------------+-------------+------------+-------+--------+-----------+-------+--------------------------+--------------------------------------+--------------------------------------+------+----------------------
    bar | bar | 1234.45678 | 9.8712e+07 | 9.8712e+07 | -2147483648 | 2147483648 | True | 0x0011 | 127.0.0.1 | blarg | 2011.....................| 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f | bdf5e8ac-a75e-4321-9ac8-938fc9576c4a | bleh | -9223372036854775808
    foo | bar | 1234.45678 | 9.8712e+07 | 9.8712e+07 | -2147483648 | 2147483648 | True | 0x0011 | 127.0.0.1 | blarg | 2011.....................| 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f | bdf5e8ac-a75e-4321-9ac8-938fc9576c4a | bleh | -9223372036854775808
    <BLANKLINE>
    (2 rows)
    <BLANKLINE>
    """
    run_func_docstring(tester=self, test_func=self.simple_schema_test)
# NOTE(review): doctest-style test -- the docstring below is executed
# verbatim by run_func_docstring, so its text (including expected cqlsh
# output) is behavior; do not reflow or edit it.
def pkey_requirement_test(self):
"""
Create schema:
>>> cqlsh('''
... CREATE TABLE primitive_type_test (
... key1 text PRIMARY KEY,
... col1 ascii,
... col2 blob,
... col3 inet,
... col4 text,
... col5 timestamp,
... col6 timeuuid,
... col7 uuid,
... col8 varchar,
... col9 bigint,
... col10 decimal,
... col11 double,
... col12 float,
... col13 int,
... col14 varint,
... col15 boolean)
... ''')
Try to create a JSON row with the pkey omitted from the column list, and omitted from the JSON data:
>>> cqlsh_err_print('''INSERT INTO primitive_type_test JSON '{"col1": "bar"}' ''')
<stdin>:2:InvalidRequest: Error from server: code=2200 [Invalid query] message="Invalid null value in condition for column key1"
<BLANKLINE>
"""
# Run the docstring above as a doctest against this tester instance.
run_func_docstring(tester=self, test_func=self.pkey_requirement_test)
# NOTE(review): doctest-style test -- the docstring below is executed
# verbatim by run_func_docstring, so its text (including expected cqlsh
# output) is behavior; do not reflow or edit it.
def null_value_test(self):
"""
Create schema:
>>> cqlsh('''
... CREATE TABLE primitive_type_test (
... key1 text PRIMARY KEY,
... col1 ascii,
... col2 blob,
... col3 inet,
... col4 text,
... col5 timestamp,
... col6 timeuuid,
... col7 uuid,
... col8 varchar,
... col9 bigint,
... col10 decimal,
... col11 double,
... col12 float,
... col13 int,
... col14 varint,
... col15 boolean)
... ''')
Insert a row where all columns are specified in the column list, but none of the non-pkey items are provided in the JSON data:
>>> cqlsh('''
... INSERT INTO primitive_type_test JSON '{"key1": "foo"}'
... ''')
Confirm columns provided in column list but not specified are null:
>>> cqlsh_print("SELECT * FROM primitive_type_test WHERE key1 = 'foo'")
<BLANKLINE>
key1 | col1 | col10 | col11 | col12 | col13 | col14 | col15 | col2 | col3 | col4 | col5 | col6 | col7 | col8 | col9
------+------+-------+-------+-------+-------+-------+-------+------+------+------+------+------+------+------+------
foo | null | null | null | null | null | null | null | null | null | null | null | null | null | null | null
<BLANKLINE>
(1 rows)
<BLANKLINE>
"""
# Run the docstring above as a doctest against this tester instance.
run_func_docstring(tester=self, test_func=self.null_value_test)
def complex_schema_test(self):
"""
Create some udt's and schema:
>>> cqlsh('''
... CREATE TYPE t_todo_item (
... label text,
... details text )
... ''')
>>> cqlsh('''
... CREATE TYPE t_todo_list (
... name text,
... todo_list list<frozen<t_todo_item>> )
... ''')
>>> cqlsh('''
... CREATE TYPE t_kitchen_sink (
... item1 ascii,
... item2 blob,
... item3 inet,
... item4 text,
... item5 timestamp,
... item6 timeuuid,
... item7 uuid,
... item8 varchar,
... item9 bigint,
... item10 decimal,
... item11 double,
... item12 float,
... item13 int,
... item14 varint,
... item15 boolean,
... item16 list<int> )
... ''')
>>> cqlsh('''
... CREATE TABLE complex_types (
... key1 text PRIMARY KEY,
... mylist list<text>,
... myset set<uuid>,
... mymap map<text, int>,
... mytuple frozen<tuple<text, int, uuid, boolean>>,
... myudt frozen<t_kitchen_sink>,
... mytodolists list<frozen<t_todo_list>>,
... many_sinks list<frozen<t_kitchen_sink>>,
... named_sinks map<text, frozen<t_kitchen_sink>> )
... ''')
Add two rows with all null values, create the first row using a regular INSERT statement, and the second row using JSON. Different key for each row:
>>> cqlsh('''
... INSERT INTO complex_types (key1) values ('row1')
... ''')
>>> cqlsh('''
... INSERT INTO complex_types JSON '{"key1":"row2"}'
... ''')
Query back both rows as JSON:
>>> cqlsh_print("SELECT JSON * FROM complex_types")
<BLANKLINE>
[json]
--------------------------------------------------------------------------------------------------------------------------------------------------------------
{"key1": "row1", "many_sinks": null, "mylist": null, "mymap": null, "myset": null, "mytodolists": null, "mytuple": null, "myudt": null, "named_sinks": null}
{"key1": "row2", "many_sinks": null, "mylist": null, "mymap": null, "myset": null, "mytodolists": null, "mytuple": null, "myudt": null, "named_sinks": null}
<BLANKLINE>
(2 rows)
<BLANKLINE>
Query back both rows as non-JSON to be sure they look ok there too:
>>> cqlsh_print('''
... SELECT * FROM complex_types
... ''')
<BLANKLINE>
key1 | many_sinks | mylist | mymap | myset | mytodolists | mytuple | myudt | named_sinks
------+------------+--------+-------+-------+-------------+---------+-------+-------------
row1 | null | null | null | null | null | null | null | null
row2 | null | null | null | null | null | null | null | null
<BLANKLINE>
(2 rows)
<BLANKLINE>
Add data for "row1" using a normal insert statement to update the record:
>>> cqlsh('''
... INSERT INTO complex_types (key1, mylist, myset, mymap, mytuple, myudt, mytodolists, many_sinks, named_sinks)
... VALUES (
... 'row1',
... ['five', 'six', 'seven', 'eight'],
... {4b66458a-2a19-41d3-af25-6faef4dea9fe, 080fdd90-ae74-41d6-9883-635625d3b069, 6cd7fab5-eacc-45c3-8414-6ad0177651d6},
... {'one' : 1, 'two' : 2, 'three': 3, 'four': 4},
... ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, true),
... {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011-02-03 04:05+0000', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 98712312.1222, item12: 98712312.5252, item13: -2147483648, item14: 2147483647, item15: false, item16: [1,3,5,7,11,13]},
... [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list:[{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}],
... [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}],
... {'namedsink1':{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012-02-03 04:05+0000', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 10012312.1222, item12: 40012312.5252, item13: -1147483648, item14: 2047483648, item15: true, item16: [1,1,2,3,5,8]},'namedsink2':{item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013-02-03 04:05+0000', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 20012312.1222, item12: 50012312.5252, item13: -1547483648, item14: 1947483648, item15: false, item16: [3,6,9,12,15]},'namedsink3':{item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014-02-03 04:05+0000', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 30012312.1222, item12: 60012312.5252, item13: 2147483647, item14: 1347483648, item15: true, item16: [0,1,0,1,2,0]}})
... ''')
Add data for "row2" using JSON, but which should be equivalent to "row1" after insert:
>>> cqlsh('''
... INSERT INTO complex_types
... JSON '{
... "key1":"row2",
... "mylist":["five", "six", "seven", "eight"],
... "myset":["4b66458a-2a19-41d3-af25-6faef4dea9fe", "080fdd90-ae74-41d6-9883-635625d3b069", "6cd7fab5-eacc-45c3-8414-6ad0177651d6"],
... "mymap":{"one" : 1, "two" : 2, "three": 3, "four": 4},
... "mytuple":["hey", 10, "16e69fba-a656-4932-8a01-6782a34505d9", true],
... "myudt":{"item1": "heyimascii", "item2": "0x0011", "item3": "127.0.0.1", "item4": "whatev", "item5": "2011-02-03 04:05+0000", "item6": "0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f", "item7": "bdf5e8ac-a75e-4321-9ac8-938fc9576c4a", "item8": "bleh", "item9": -9223372036854775808, "item10": 1234.45678, "item11": 98712312.1222, "item12": 98712312.5252, "item13": -2147483648, "item14": 2147483647, "item15": false, "item16": [1,3,5,7,11,13]},
... "mytodolists":[{"name": "stuff to do!", "todo_list": [{"label": "buy groceries", "details": "bread and milk"}, {"label": "pick up car from shop", "details": "$325 due"}, {"label": "call dave", "details": "for some reason"}]}, {"name": "more stuff to do!", "todo_list":[{"label": "buy new car", "details": "the old one is getting expensive"}, {"label": "price insurance", "details": "current cost is $95/mo"}]}],
... "many_sinks":[{"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012-02-03 04:05+0000", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 10012312.1222, "item12": 40012312.5252, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1,1,2,3,5,8]}, {"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013-02-03 04:05+0000", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 20012312.1222, "item12": 50012312.5252, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3,6,9,12,15]},{"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014-02-03 04:05+0000", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 30012312.1222, "item12": 60012312.5252, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0,1,0,1,2,0]}],
... "named_sinks":{"namedsink1":{"item1": "asdf", "item2": "0x0012", "item3": "127.0.0.2", "item4": "whatev1", "item5": "2012-02-03 04:05+0000", "item6": "d05a10c8-7c12-11e4-949d-b4b6763e9d6f", "item7": "f90b04b1-f9ad-4ffa-b869-a7d894ce6003", "item8": "tyru", "item9": -9223372036854771111, "item10": 4321.45678, "item11": 10012312.1222, "item12": 40012312.5252, "item13": -1147483648, "item14": 2047483648, "item15": true, "item16": [1,1,2,3,5,8]},"namedsink2":{"item1": "fdsa", "item2": "0x0013", "item3": "127.0.0.3", "item4": "whatev2", "item5": "2013-02-03 04:05+0000", "item6": "d8ac38c8-7c12-11e4-8955-b4b6763e9d6f", "item7": "e3e84f21-f28c-4e0f-80e0-068a640ae53a", "item8": "uytr", "item9": -3333372036854775808, "item10": 1234.12321, "item11": 20012312.1222, "item12": 50012312.5252, "item13": -1547483648, "item14": 1947483648, "item15": false, "item16": [3,6,9,12,15]},"namedsink3":{"item1": "zxcv", "item2": "0x0014", "item3": "127.0.0.4", "item4": "whatev3", "item5": "2014-02-03 04:05+0000", "item6": "de30838a-7c12-11e4-a907-b4b6763e9d6f", "item7": "f9381f0e-9467-4d4c-9315-eb9f0232487b", "item8": "fghj", "item9": -2239372036854775808, "item10": 5555.55555, "item11": 30012312.1222, "item12": 60012312.5252, "item13": 2147483647, "item14": 1347483648, "item15": true, "item16": [0,1,0,1,2,0]}}
... }'
... ''')
Query both rows back, one field at a time (for easier reading) and make sure they match:
>>> cqlsh_print("SELECT key1, mylist from complex_types")
<BLANKLINE>
key1 | mylist
------+-----------------------------------
row1 | ['five', 'six', 'seven', 'eight']
row2 | ['five', 'six', 'seven', 'eight']
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, myset from complex_types")
<BLANKLINE>
key1 | myset
------+--------------------------------------------------------------------------------------------------------------------
row1 | {080fdd90-ae74-41d6-9883-635625d3b069, 4b66458a-2a19-41d3-af25-6faef4dea9fe, 6cd7fab5-eacc-45c3-8414-6ad0177651d6}
row2 | {080fdd90-ae74-41d6-9883-635625d3b069, 4b66458a-2a19-41d3-af25-6faef4dea9fe, 6cd7fab5-eacc-45c3-8414-6ad0177651d6}
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, mymap from complex_types")
<BLANKLINE>
key1 | mymap
------+---------------------------------------------
row1 | {'four': 4, 'one': 1, 'three': 3, 'two': 2}
row2 | {'four': 4, 'one': 1, 'three': 3, 'two': 2}
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, mytuple from complex_types")
<BLANKLINE>
key1 | mytuple
------+---------------------------------------------------------
row1 | ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, True)
row2 | ('hey', 10, 16e69fba-a656-4932-8a01-6782a34505d9, True)
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, myudt from complex_types")
<BLANKLINE>
key1 | myudt
------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
row1 | {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011...', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 9.8712e+07, item12: 9.8712e+07, item13: -2147483648, item14: 2147483647, item15: False, item16: [1, 3, 5, 7, 11, 13]}
row2 | {item1: 'heyimascii', item2: 0x0011, item3: '127.0.0.1', item4: 'whatev', item5: '2011...', item6: 0ad6dfb6-7a6e-11e4-bc39-b4b6763e9d6f, item7: bdf5e8ac-a75e-4321-9ac8-938fc9576c4a, item8: 'bleh', item9: -9223372036854775808, item10: 1234.45678, item11: 9.8712e+07, item12: 9.8712e+07, item13: -2147483648, item14: 2147483647, item15: False, item16: [1, 3, 5, 7, 11, 13]}
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, mytodolists from complex_types")
<BLANKLINE>
key1 | mytodolists
------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- row1 | [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list: [{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}]
row2 | [{name: 'stuff to do!', todo_list: [{label: 'buy groceries', details: 'bread and milk'}, {label: 'pick up car from shop', details: '$325 due'}, {label: 'call dave', details: 'for some reason'}]}, {name: 'more stuff to do!', todo_list: [{label: 'buy new car', details: 'the old one is getting expensive'}, {label: 'price insurance', details: 'current cost is $95/mo'}]}]
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, many_sinks from complex_types")
<BLANKLINE>
key1 | many_sinks
------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
row1 | [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}]
row2 | [{item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}]
<BLANKLINE>
(2 rows)
<BLANKLINE>
>>> cqlsh_print("SELECT key1, named_sinks from complex_types")
<BLANKLINE>
key1 | named_sinks
------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
row1 | {'namedsink1': {item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, 'namedsink2': {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, 'namedsink3': {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}}
row2 | {'namedsink1': {item1: 'asdf', item2: 0x0012, item3: '127.0.0.2', item4: 'whatev1', item5: '2012...', item6: d05a10c8-7c12-11e4-949d-b4b6763e9d6f, item7: f90b04b1-f9ad-4ffa-b869-a7d894ce6003, item8: 'tyru', item9: -9223372036854771111, item10: 4321.45678, item11: 1.0012e+07, item12: 4.0012e+07, item13: -1147483648, item14: 2047483648, item15: True, item16: [1, 1, 2, 3, 5, 8]}, 'namedsink2': {item1: 'fdsa', item2: 0x0013, item3: '127.0.0.3', item4: 'whatev2', item5: '2013...', item6: d8ac38c8-7c12-11e4-8955-b4b6763e9d6f, item7: e3e84f21-f28c-4e0f-80e0-068a640ae53a, item8: 'uytr', item9: -3333372036854775808, item10: 1234.12321, item11: 2.0012e+07, item12: 5.0012e+07, item13: -1547483648, item14: 1947483648, item15: False, item16: [3, 6, 9, 12, 15]}, 'namedsink3': {item1: 'zxcv', item2: 0x0014, item3: '127.0.0.4', item4: 'whatev3', item5: '2014...', item6: de30838a-7c12-11e4-a907-b4b6763e9d6f, item7: f9381f0e-9467-4d4c-9315-eb9f0232487b, item8: 'fghj', item9: -2239372036854775808, item10: 5555.55555, item11: 3.0012e+07, item12: 6.0012e+07, item13: 2147483647, item14: 1347483648, item15: True, item16: [0, 1, 0, 1, 2, 0]}}
<BLANKLINE>
(2 rows)
<BLANKLINE>
"""
run_func_docstring(tester=self, test_func=self.complex_schema_test)
| 78.190602
| 1,372
| 0.491299
| 11,641
| 118,146
| 4.938064
| 0.077399
| 0.006611
| 0.008089
| 0.013395
| 0.849141
| 0.826752
| 0.821063
| 0.805459
| 0.786062
| 0.772667
| 0
| 0.203208
| 0.217519
| 118,146
| 1,510
| 1,373
| 78.242384
| 0.418597
| 0.867588
| 0
| 0.185714
| 0
| 0
| 0.085391
| 0.007587
| 0
| 0
| 0
| 0.029801
| 0
| 1
| 0.185714
| false
| 0
| 0.078571
| 0.007143
| 0.35
| 0.042857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
817cf626660625f40362318cba6444d8f3757677
| 72
|
py
|
Python
|
Chapter 01/Chap01_Example1.107.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 01/Chap01_Example1.107.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 01/Chap01_Example1.107.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
# Demonstrates chained assignment: one object bound to several names,
# and how '+' dispatches on the operands' type.
r=s=t=1 #--- I1: r, s and t all refer to the int 1
print(r + s + t)  # integer addition -> prints 3
r=s=t='1' #--- I2: rebound -- all three now refer to the str '1'
print(r + s + t)  # string concatenation -> prints 111
| 14.4
| 18
| 0.388889
| 18
| 72
| 1.555556
| 0.388889
| 0.285714
| 0.428571
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.277778
| 72
| 4
| 19
| 18
| 0.461538
| 0.166667
| 0
| 0.5
| 0
| 0
| 0.018519
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
c49d7c026054a433d0b27ea65a3e4a7ad9f29b83
| 12,508
|
py
|
Python
|
wrappers/SONATAClient/admin.py
|
CN-UPB/python-mano-wrappers
|
8e3607feaa97bc3e2c906ee8e4b25b21853ea6cf
|
[
"Apache-2.0"
] | null | null | null |
wrappers/SONATAClient/admin.py
|
CN-UPB/python-mano-wrappers
|
8e3607feaa97bc3e2c906ee8e4b25b21853ea6cf
|
[
"Apache-2.0"
] | null | null | null |
wrappers/SONATAClient/admin.py
|
CN-UPB/python-mano-wrappers
|
8e3607feaa97bc3e2c906ee8e4b25b21853ea6cf
|
[
"Apache-2.0"
] | null | null | null |
from .auth import Auth
import json
import requests
class Admin():
def __init__(self, host, port=4002):
    """Store connection settings for the SONATA gatekeeper.

    host: gatekeeper hostname or IP.
    port: gatekeeper TCP port (default 4002).

    NOTE(review): _base_path contains only one placeholder ({0}), so
    later calls like self._base_path.format(self._host, self._port)
    silently drop the port (str.format ignores extra args). Confirm
    whether the URL is meant to include ':{1}' before changing it.
    """
    self._host = host
    self._port = port
    self._base_path = 'http://{0}/api/v2'
def get_user_list(self, token, host=None, port=None):
    """GET {base}/users from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str}.
    NOTE(review): if the HTTP call raises, the plain result dict is
    returned instead of a JSON string -- confirm callers handle both.
    `port` is accepted but unused here.
    """
    # Template has a single placeholder; only the host is substituted.
    base_path = self._base_path.format(self._host if host is None else host)
    endpoint = "{0}/users".format(base_path)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_user_info(self, token, id, host=None, port=None):
    """GET {base}/users/{id} from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    NOTE(review): sends application/yaml while sibling methods send
    application/json -- confirm intended.
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/users/{1}".format(base_path, id)
    auth_headers = {
        "Content-Type": "application/yaml",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_nsinstances_records(self, token, host=None, port=None):
    """GET http://{host}:{port}/records/nsr/ns-instances.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = "http://{0}:{1}".format(*target)
    endpoint = "{0}/records/nsr/ns-instances".format(base_path)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_nsinstances_records_instanceId(self, token, id, host=None, port=None):
    """GET http://{host}:{port}/records/nsr/ns-instances/{id}.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = "http://{0}:{1}".format(*target)
    endpoint = "{0}/records/nsr/ns-instances/{1}".format(base_path, id)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_vims_list(self, token, host=None, port=None):
    """GET {base}/vims from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/vims".format(base_path)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_vims_requestId(self, token, id, host=None, port=None):
    """GET {base}/vims/{id} from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/vims/{1}".format(base_path, id)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_instantions_requests(self, token, host=None, port=None):
    """GET {base}/requests?limit=100 from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/requests?limit=100".format(base_path)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_instantions_requests_requestId(self, id, token, host=None, port=None):
    """GET {base}/requests/{id} from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    NOTE(review): parameter order here is (id, token), the reverse of
    every sibling method -- kept as-is for caller compatibility.
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/requests/{1}".format(base_path, id)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_functions(self, token, host=None, port=None):
    """GET {base}/functions from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/functions".format(base_path)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_functions_functionId(self, token, id, host=None, port=None):
    """GET {base}/functions/{id} from the gatekeeper.

    Returns a JSON-encoded string of {'error': bool, 'data': str};
    if the HTTP call raises, the plain dict is returned instead
    (NOTE(review): inconsistent return type -- confirm callers).
    """
    target = (self._host, self._port) if host is None else (host, port)
    base_path = self._base_path.format(*target)
    endpoint = "{0}/functions/{1}".format(base_path, id)
    auth_headers = {
        "Content-Type": "application/json",
        'Authorization': 'Bearer {}'.format(token),
    }
    outcome = {'error': True, 'data': ''}
    try:
        response = requests.get(endpoint, params=None, verify=False,
                                stream=True, headers=auth_headers)
    except Exception as exc:
        outcome['data'] = str(exc)
        return outcome
    if response.status_code == requests.codes.ok:
        outcome['error'] = False
        outcome['data'] = response.text
    return json.dumps(outcome)
def get_packages(self, token, host=None, port=None):
if host is None:
base_path = self._base_path.format(self._host, self._port)
else:
base_path = self._base_path.format(host, port)
result = {'error': True, 'data': ''}
headers = {"Content-Type": "application/json", 'Authorization': 'Bearer {}'.format(token)}
_endpoint = "{0}/packages".format(base_path)
try:
r = requests.get(_endpoint, params=None, verify=False, stream=True, headers=headers)
except Exception as e:
result['data'] = str(e)
return result
if r.status_code == requests.codes.ok:
result['error'] = False
result['data'] = r.text
return json.dumps(result)
def get_services(self, token, host=None, port=None):
if host is None:
base_path = self._base_path.format(self._host, self._port)
else:
base_path = self._base_path.format(host, port)
result = {'error': True, 'data': ''}
headers = {"Content-Type": "application/json", 'Authorization': 'Bearer {}'.format(token)}
_endpoint = "{0}/services".format(base_path)
try:
r = requests.get(_endpoint, params=None, verify=False, stream=True, headers=headers)
except Exception as e:
result['data'] = str(e)
return result
if r.status_code == requests.codes.ok:
result['error'] = False
result['data'] = r.text
return json.dumps(result)
def get_services_serviceId(self, token, id, host=None, port=None):
if host is None:
base_path = self._base_path.format(self._host, self._port)
else:
base_path = self._base_path.format(host, port)
result = {'error': True, 'data': ''}
headers = {"Content-Type": "application/json", 'Authorization': 'Bearer {}'.format(token)}
_endpoint = "{0}/services/{1}".format(base_path, id)
try:
r = requests.get(_endpoint, params=None, verify=False, stream=True, headers=headers)
except Exception as e:
result['data'] = str(e)
return result
if r.status_code == requests.codes.ok:
result['error'] = False
result['data'] = r.text
return json.dumps(result)
def get_ns_records(self, token, host=None, port=None):
if host is None:
base_path = "http://{0}:{1}".format(self._host, self._port)
else:
base_path = "http://{0}:{1}".format(host, port)
result = {'error': True, 'data': ''}
headers = {"Content-Type": "application/json", 'Authorization': 'Bearer {}'.format(token)}
_endpoint = "{0}/records/nsr".format(base_path)
try:
r = requests.get(_endpoint, params=None, verify=False, stream=True, headers=headers)
except Exception as e:
result['data'] = str(e)
return result
if r.status_code == requests.codes.ok:
result['error'] = False
result['data'] = r.text
return json.dumps(result)
def get_services_records(self, token, host=None, port=None):
if host is None:
base_path = "http://{0}:{1}".format(self._host, self._port)
else:
base_path = "http://{0}:{1}".format(host, port)
result = {'error': True, 'data': ''}
headers = {"Content-Type": "application/json", 'Authorization': 'Bearer {}'.format(token)}
_endpoint = "{0}/records/services".format(base_path)
try:
r = requests.get(_endpoint, params=None, verify=False, stream=True, headers=headers)
except Exception as e:
result['data'] = str(e)
return result
if r.status_code == requests.codes.ok:
result['error'] = False
result['data'] = r.text
return json.dumps(result)
| 38.965732
| 99
| 0.556204
| 1,448
| 12,508
| 4.666436
| 0.053867
| 0.080509
| 0.040847
| 0.052094
| 0.967145
| 0.967145
| 0.967145
| 0.959893
| 0.954862
| 0.954862
| 0
| 0.00529
| 0.304765
| 12,508
| 321
| 100
| 38.965732
| 0.771734
| 0
| 0
| 0.844106
| 0
| 0
| 0.119288
| 0.006727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060837
| false
| 0
| 0.011407
| 0
| 0.190114
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ef9bff6fac4e259ed4eab71abd0e4b4b65cacf3
| 9,774
|
py
|
Python
|
encoder_layers_test.py
|
loisaidasam/polyai-models
|
13d8649e8bc653baf0681ab2c548918b0566f3f8
|
[
"Apache-2.0"
] | 2
|
2020-10-16T11:30:59.000Z
|
2021-03-28T04:51:25.000Z
|
encoder_layers_test.py
|
loisaidasam/polyai-models
|
13d8649e8bc653baf0681ab2c548918b0566f3f8
|
[
"Apache-2.0"
] | null | null | null |
encoder_layers_test.py
|
loisaidasam/polyai-models
|
13d8649e8bc653baf0681ab2c548918b0566f3f8
|
[
"Apache-2.0"
] | 7
|
2020-10-04T20:10:28.000Z
|
2021-11-30T11:28:40.000Z
|
"""Unit tests for encoder_layers.py.
Copyright PolyAI Limited.
"""
import tensorflow as tf
import encoder_layers
_TEST_ENCODER = "testdata/tfhub_modules/encoder"
_TEST_EXTRA_CONTEXT_ENCODER = "testdata/tfhub_modules/extra_context_encoder"
class EncoderLayersTest(tf.test.TestCase):
    """Tests for the TF-Hub-backed encoder layers in encoder_layers.py.

    Each test builds a layer from a small test-data module, checks the
    regularization-loss bookkeeping, the output shapes, determinism on
    duplicate inputs, and that gradients flow to trainable (and only
    trainable) variables.
    """

    def test_encode_sentences(self):
        """SentenceEncoderLayer: losses, output shape, gradient flow."""
        with self.test_session() as sess:
            layer = encoder_layers.SentenceEncoderLayer(_TEST_ENCODER)
            encodings = layer(
                ["hello world", "what's up?", "hello world",
                 "sentence 4"])
            # One loss is expected per trainable weight, excluding
            # layer-norm parameters.
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            self.assertEqual(len(weights), len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            encodings_val = sess.run(encodings)
            self.assertEqual(list(encodings_val.shape), [4, 3])
            # Identical inputs (rows 0 and 2) must encode identically.
            self.assertAllClose(encodings_val[0], encodings_val[2])
            # Gradients must exist for every trainable variable...
            grads = tf.gradients(
                [encodings] + layer.losses, layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            # ...and must not exist for non-trainable ones.
            non_grads = tf.gradients(
                [encodings] + layer.losses, layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_encode_contexts(self):
        """ContextEncoderLayer: losses, output shape, gradient flow."""
        with self.test_session() as sess:
            layer = encoder_layers.ContextEncoderLayer(_TEST_ENCODER)
            encodings = layer(
                ["hello world", "what's up?", "hello world",
                 "sentence 4"])
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            self.assertEqual(len(weights), len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            encodings_val = sess.run(encodings)
            self.assertEqual(list(encodings_val.shape), [4, 5])
            self.assertAllClose(encodings_val[0], encodings_val[2])
            grads = tf.gradients(
                [encodings] + layer.losses, layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            non_grads = tf.gradients(
                [encodings] + layer.losses, layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_encode_responses(self):
        """ResponseEncoderLayer: losses, output shape, gradient flow."""
        with self.test_session() as sess:
            layer = encoder_layers.ResponseEncoderLayer(_TEST_ENCODER)
            encodings = layer(
                ["hello world", "what's up?", "hello world",
                 "sentence 4"])
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            self.assertEqual(len(weights), len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            encodings_val = sess.run(encodings)
            self.assertEqual(list(encodings_val.shape), [4, 5])
            self.assertAllClose(encodings_val[0], encodings_val[2])
            grads = tf.gradients(
                [encodings] + layer.losses, layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            non_grads = tf.gradients(
                [encodings] + layer.losses, layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_encode_contexts_and_responses(self):
        """ContextAndResponseEncoderLayer: dual encodings and gradients."""
        with self.test_session() as sess:
            layer = encoder_layers.ContextAndResponseEncoderLayer(
                _TEST_ENCODER)
            context_encodings, response_encodings = layer([
                ["context 1", "context 2"],
                ["response 1", "response 2", "response 3"],
            ])
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            # Plus one because the embedding regularization is applied for
            # both context and response.
            self.assertEqual(len(weights) + 1, len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            context_encodings_val = sess.run(context_encodings)
            self.assertEqual(list(context_encodings_val.shape), [2, 5])
            response_encodings_val = sess.run(response_encodings)
            self.assertEqual(list(response_encodings_val.shape), [3, 5])
            grads = tf.gradients(
                [context_encodings, response_encodings] + layer.losses,
                layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            non_grads = tf.gradients(
                [context_encodings, response_encodings] + layer.losses,
                layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_encode_contexts_and_responses_with_extra_contexts(self):
        """ContextAndResponseEncoderLayer with uses_extra_context=True."""
        with self.test_session() as sess:
            layer = encoder_layers.ContextAndResponseEncoderLayer(
                _TEST_EXTRA_CONTEXT_ENCODER, uses_extra_context=True)
            context_encodings, response_encodings = layer([
                ["context 1", "context 2"],
                ["extra context 1", "extra context 2"],
                ["response 1", "response 2", "response 3"],
            ])
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            # Plus two because the embedding regularization is applied for
            # context, extra contexts, and response.
            self.assertEqual(len(weights) + 2, len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            context_encodings_val = sess.run(context_encodings)
            self.assertEqual(list(context_encodings_val.shape), [2, 5])
            response_encodings_val = sess.run(response_encodings)
            self.assertEqual(list(response_encodings_val.shape), [3, 5])
            grads = tf.gradients(
                [context_encodings, response_encodings] + layer.losses,
                layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            non_grads = tf.gradients(
                [context_encodings, response_encodings] + layer.losses,
                layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_encode_to_contextualized_subwords(self):
        """ContextualizedSubwordsLayer: token/sequence outputs, gradients."""
        with self.test_session() as sess:
            layer = encoder_layers.ContextualizedSubwordsLayer(_TEST_ENCODER)
            tokens, sequence_encodings = layer(
                ["contextualised subword sequence 1", "sequence encoding 2"]
            )
            weights = [
                var for var in layer.trainable_variables
                if "layer_norm" not in var.name
            ]
            self.assertEqual(len(weights), len(layer.losses))
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            tokens_val = sess.run(tokens)
            self.assertEqual(list(tokens_val.shape), [2, 26])
            sequence_encodings_val = sess.run(sequence_encodings)
            self.assertEqual(list(sequence_encodings_val.shape), [2, 26, 3])
            grads = tf.gradients(
                [sequence_encodings] + layer.losses,
                layer.trainable_variables)
            for grad in grads:
                self.assertIsNotNone(grad)
            non_grads = tf.gradients(
                [sequence_encodings] + layer.losses,
                layer.non_trainable_variables)
            for grad in non_grads:
                self.assertIsNone(grad)

    def test_non_trainable(self):
        """trainable=False yields no trainable variables and no losses."""
        with self.test_session() as sess:
            layer = encoder_layers.ContextualizedSubwordsLayer(
                _TEST_ENCODER, trainable=False)
            tokens, sequence_encodings = layer(
                ["contextualised subword sequence 1", "sequence encoding 2"]
            )
            self.assertEqual(layer.trainable_variables, [])
            self.assertEqual(layer.losses, [])
            # check layer still works
            sess.run([
                tf.compat.v1.local_variables_initializer(),
                tf.compat.v1.global_variables_initializer(),
                tf.compat.v1.tables_initializer(),
            ])
            tokens_val = sess.run(tokens)
            self.assertEqual(list(tokens_val.shape), [2, 26])
            sequence_encodings_val = sess.run(sequence_encodings)
            self.assertEqual(list(sequence_encodings_val.shape), [2, 26, 3])
# Run the test suite via the TensorFlow test runner when executed directly.
if __name__ == "__main__":
    tf.test.main()
| 41.769231
| 77
| 0.578678
| 988
| 9,774
| 5.507085
| 0.1083
| 0.052931
| 0.038596
| 0.072046
| 0.894872
| 0.894872
| 0.881639
| 0.865098
| 0.859217
| 0.840838
| 0
| 0.011624
| 0.331082
| 9,774
| 233
| 78
| 41.948498
| 0.820587
| 0.027931
| 0
| 0.820896
| 0
| 0
| 0.052476
| 0.007798
| 0
| 0
| 0
| 0
| 0.169154
| 1
| 0.034826
| false
| 0
| 0.00995
| 0
| 0.049751
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4814a41cbbbade8b4ec0caf739a6aa1a8e88712c
| 47,628
|
py
|
Python
|
samples/users/user.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
samples/users/user.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
samples/users/user.py
|
zoho/zohocrm-python-sdk-2.0
|
3a93eb3b57fed4e08f26bd5b311e101cb2995411
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from zcrmsdk.src.com.zoho.crm.api import ParameterMap, HeaderMap
from zcrmsdk.src.com.zoho.crm.api.profiles import Profile
from zcrmsdk.src.com.zoho.crm.api.roles import Role
from zcrmsdk.src.com.zoho.crm.api.users import *
from zcrmsdk.src.com.zoho.crm.api.users import User as ZCRMUser
class User(object):
@staticmethod
def get_users():
"""
This method is used to retrieve the users data specified in the API request.
"""
# Get instance of UsersOperations Class
users_operations = UsersOperations()
# Get instance of ParameterMap Class
param_instance = ParameterMap()
# Possible parameters for Get Users operation
param_instance.add(GetUsersParam.page, 1)
param_instance.add(GetUsersParam.per_page, 200)
param_instance.add(GetUsersParam.type, 'ActiveConfirmedUsers')
# Get instance of ParameterMap Class
header_instance = HeaderMap()
# Possible headers for Get Users operation
header_instance.add(GetUsersHeader.if_modified_since, datetime.fromisoformat('2019-07-07T10:00:00+05:30'))
# Call get_users method that takes ParameterMap instance and HeaderMap instance as parameters
response = users_operations.get_users(param_instance, header_instance)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code() == 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received.
if isinstance(response_object, ResponseWrapper):
# Get the list of obtained User instances
user_list = response_object.get_users()
for user in user_list:
# Get the Country of each User
print("User Country: " + str(user.get_country()))
# Get the CustomizeInfo instance of each User
customize_info = user.get_customize_info()
# Check if customizeInfo is not None
if customize_info is not None:
if customize_info.get_notes_desc() is not None:
# Get the NotesDesc of each User
print("User CustomizeInfo NotesDesc: " + str(customize_info.get_notes_desc()))
if customize_info.get_show_right_panel() is not None:
# Get the ShowRightPanel of each User
print("User CustomizeInfo ShowRightPanel: " + str(customize_info.get_show_right_panel()))
if customize_info.get_bc_view() is not None:
# Get the BcView of each User
print("User CustomizeInfo BcView: " + str(customize_info.get_bc_view()))
if customize_info.get_show_home() is not None:
# Get the ShowHome of each User
print("User CustomizeInfo ShowHome: " + str(customize_info.get_show_home()))
if customize_info.get_show_detail_view() is not None:
# Get the ShowDetailView of each User
print("User CustomizeInfo ShowDetailView: " + str(customize_info.get_show_detail_view()))
if customize_info.get_unpin_recent_item() is not None:
# Get the UnpinRecentItem of each User
print("User CustomizeInfo UnpinRecentItem: " + str(customize_info.get_unpin_recent_item()))
# Get the Role instance of each User
role = user.get_role()
if role is not None:
# Get the Name of Role
print("User Role Name: " + str(role.get_name()))
# Get the ID of Role
print("User Role ID: " + str(role.get_id()))
# Get the Signature of each User
print("User Signature: " + str(user.get_signature()))
# Get the City of each User
print("User City: " + str(user.get_city()))
# Get the NameFormat of each User
print("User NameFormat: " + str(user.get_name_format()))
# Get the Language of each User
print("User Language: " + str(user.get_language()))
# Get the Locale of each User
print("User Locale: " + str(user.get_locale()))
# Get the Microsoft of each User
print("User Microsoft: " + str(user.get_microsoft()))
if user.get_personal_account() is not None:
# Get the PersonalAccount of each User
print("User PersonalAccount: " + str(user.get_personal_account()))
# Get the DefaultTabGroup of each User
print("User DefaultTabGroup: " + str(user.get_default_tab_group()))
# Get the Isonline of each User
print("User Isonline: " + str(user.get_isonline()))
# Get the modifiedBy User instance of each User
modified_by = user.get_modified_by()
# Check if modified_by is not null
if modified_by is not None:
# Get the Name of the modifiedBy User
print("User Modified By User-Name: " + str(modified_by.get_name()))
# Get the ID of the modifiedBy User
print("User Modified By User-ID: " + str(modified_by.get_id()))
# Get the Street of each User
print("User Street: " + str(user.get_street()))
# Get the Currency of each User
print("User Currency: " + str(user.get_currency()))
# Get the Alias of each User
print("User Alias: " + str(user.get_alias()))
# Get the Theme instance of each User
theme = user.get_theme()
# Check if theme is not None
if theme is not None:
# Get the TabTheme instance of Theme
normal_tab = theme.get_normal_tab()
# Check if normal_tab is not null
if normal_tab is not None:
# Get the FontColor of NormalTab
print("User Theme NormalTab FontColor: " + str(normal_tab.get_font_color()))
# Get the Background of NormalTab
print("User Theme NormalTab Background: " + str(normal_tab.get_background()))
# Get the TabTheme instance of Theme
selected_tab = theme.get_selected_tab()
# Check if selected_tab is not null
if selected_tab is not None:
# Get the FontColor of selected_tab
print("User Theme Selected Tab FontColor: " + str(selected_tab.get_font_color()))
# Get the Background of selected_tab
print("User Theme Selected Tab Background: " + str(selected_tab.get_background()))
# Get the NewBackground of each Theme
print("User Theme NewBackground: " + str(theme.get_new_background()))
# Get the Background of each Theme
print("User Theme Background: " + str(theme.get_background()))
# Get the Screen of each Theme
print("User Theme Screen: " + str(theme.get_screen()))
# Get the Type of each Theme
print("User Theme Type: " + str(theme.get_type()))
# Get the ID of each User
print("User ID: " + str(user.get_id()))
# Get the State of each User
print("User State: " + str(user.get_state()))
# Get the Fax of each User
print("User Fax: " + str(user.get_fax()))
# Get the CountryLocale of each User
print("User CountryLocale: " + str(user.get_country_locale()))
# Get the FirstName of each User
print("User FirstName: " + str(user.get_first_name()))
# Get the Email of each User
print("User Email: " + str(user.get_email()))
# Get the reportingTo User instance of each User
reporting_to = user.get_reporting_to()
# Check if reporting_to is not None
if reporting_to is not None:
# Get the Name of the reporting_to User
print("User ReportingTo User-Name: " + str(reporting_to.get_name()))
# Get the ID of the reporting_to User
print("User ReportingTo User-ID: " + str(reporting_to.get_id()))
# Get the DecimalSeparator of each User
print("User DecimalSeparator: " + str(user.get_decimal_separator()))
# Get the Zip of each User
print("User Zip: " + str(user.get_zip()))
# Get the CreatedTime of each User
print("User CreatedTime: " + str(user.get_created_time()))
# Get the Website of each User
print("User Website: " + str(user.get_website()))
if user.get_modified_time() is not None:
# Get the ModifiedTime of each User
print("User ModifiedTime: " + str(user.get_modified_time()))
# Get the TimeFormat of each User
print("User TimeFormat: " + str(user.get_time_format()))
# Get the Offset of each User
print("User Offset: " + str(user.get_offset()))
# Get the Profile instance of each User
profile = user.get_profile()
# Check if profile is not None
if profile is not None:
# Get the Name of the profile
print("User Profile Name: " + str(profile.get_name()))
# Get the ID of the profile
print("User Profile ID: " + str(profile.get_id()))
# Get the Mobile of each User
print("User Mobile: " + str(user.get_mobile()))
# Get the LastName of each User
print("User LastName: " + str(user.get_last_name()))
# Get the TimeZone of each User
print("User TimeZone: " + str(user.get_time_zone()))
# Get the Custom Fields, if any
print("Custom Field: " + str(user.get_key_value('Custom_Field')))
# Get the created_by User instance of each User
created_by = user.get_created_by()
# Check if created_by is not None
if created_by is not None:
# Get the Name of the created_by User
print("User Created By User-Name: " + str(created_by.get_name()))
# Get the ID of the created_by User
print("User Created By User-ID: " + str(created_by.get_id()))
# Get the Zuid of each User
print("User Zuid: " + str(user.get_zuid()))
# Get the Confirm of each User
print("User Confirm: " + str(user.get_confirm()))
# Get the FullName of each User
print("User FullName: " + str(user.get_full_name()))
# Get the list of obtained Territory instances
territories = user.get_territories()
# Check if territories is not None
if territories is not None:
for territory in territories:
# Get the Manager of the Territory
print("User Territory Manager: " + str(territory.get_manager()))
# Get the Name of the Territory
print("User Territory Name: " + str(territory.get_name()))
# Get the ID of the Territory
print("User Territory ID: " + str(territory.get_id()))
# Get the Phone of each User
print("User Phone: " + str(user.get_phone()))
# Get the DOB of each User
print("User DOB: " + str(user.get_dob()))
# Get the DateFormat of each User
print("User DateFormat: " + str(user.get_date_format()))
# Get the Status of each User
print("User Status: " + str(user.get_status()))
# Get the obtained Info object
info = response_object.get_info()
if info is not None:
if info.get_per_page() is not None:
# Get the PerPage of the Info
print("User Info PerPage: " + str(info.get_per_page()))
if info.get_count() is not None:
# Get the Count of the Info
print("User Info Count: " + str(info.get_count()))
if info.get_page() is not None:
# Get the Page of the Info
print("User Info Page: " + str(info.get_page()))
if info.get_more_records() is not None:
# Get the MoreRecords of the Info
print("User Info MoreRecords: " + str(info.get_more_records()))
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def get_user(user_id):
"""
This method is used to get the details of any specific user with ID
:param user_id: The ID of the User to be obtained
"""
"""
example
user_id = 3409643000000302031
"""
# Get instance of UsersOperations Class
users_operations = UsersOperations()
# Get instance of ParameterMap Class
header_instance = HeaderMap()
# Possible headers for Get User operation
header_instance.add(GetUserHeader.if_modified_since, datetime.fromisoformat('2019-07-07T10:00:00+05:30'))
# Call get_user method that takes HeaderMap instance and user_id as parameters
response = users_operations.get_user(user_id, header_instance)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code() == 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received.
if isinstance(response_object, ResponseWrapper):
# Get the list of obtained User instances
user_list = response_object.get_users()
for user in user_list:
# Get the Country of each User
print("User Country: " + str(user.get_country()))
# Get the CustomizeInfo instance of each User
customize_info = user.get_customize_info()
# Check if customizeInfo is not None
if customize_info is not None:
if customize_info.get_notes_desc() is not None:
# Get the NotesDesc of each User
print("User CustomizeInfo NotesDesc: " + str(customize_info.get_notes_desc()))
if customize_info.get_show_right_panel() is not None:
# Get the ShowRightPanel of each User
print(
"User CustomizeInfo ShowRightPanel: " + str(customize_info.get_show_right_panel()))
if customize_info.get_bc_view() is not None:
# Get the BcView of each User
print("User CustomizeInfo BcView: " + str(customize_info.get_bc_view()))
if customize_info.get_show_home() is not None:
# Get the ShowHome of each User
print("User CustomizeInfo ShowHome: " + str(customize_info.get_show_home()))
if customize_info.get_show_detail_view() is not None:
# Get the ShowDetailView of each User
print(
"User CustomizeInfo ShowDetailView: " + str(customize_info.get_show_detail_view()))
if customize_info.get_unpin_recent_item() is not None:
# Get the UnpinRecentItem of each User
print("User CustomizeInfo UnpinRecentItem: " + str(
customize_info.get_unpin_recent_item()))
# Get the Role instance of each User
role = user.get_role()
if role is not None:
# Get the Name of Role
print("User Role Name: " + str(role.get_name()))
# Get the ID of Role
print("User Role ID: " + str(role.get_id()))
# Get the Signature of each User
print("User Signature: " + str(user.get_signature()))
# Get the City of each User
print("User City: " + str(user.get_city()))
# Get the NameFormat of each User
print("User NameFormat: " + str(user.get_name_format()))
# Get the Language of each User
print("User Language: " + str(user.get_language()))
# Get the Locale of each User
print("User Locale: " + str(user.get_locale()))
# Get the Microsoft of each User
print("User Microsoft: " + str(user.get_microsoft()))
if user.get_personal_account() is not None:
# Get the PersonalAccount of each User
print("User PersonalAccount: " + str(user.get_personal_account()))
# Get the DefaultTabGroup of each User
print("User DefaultTabGroup: " + str(user.get_default_tab_group()))
# Get the Isonline of each User
print("User Isonline: " + str(user.get_isonline()))
# Get the modifiedBy User instance of each User
modified_by = user.get_modified_by()
# Check if modified_by is not null
if modified_by is not None:
# Get the Name of the modifiedBy User
print("User Modified By User-Name: " + str(modified_by.get_name()))
# Get the ID of the modifiedBy User
print("User Modified By User-ID: " + str(modified_by.get_id()))
# Get the Street of each User
print("User Street: " + str(user.get_street()))
# Get the Currency of each User
print("User Currency: " + str(user.get_currency()))
# Get the Alias of each User
print("User Alias: " + str(user.get_alias()))
# Get the Theme instance of each User
theme = user.get_theme()
# Check if theme is not None
if theme is not None:
# Get the TabTheme instance of Theme
normal_tab = theme.get_normal_tab()
# Check if normal_tab is not null
if normal_tab is not None:
# Get the FontColor of NormalTab
print("User Theme NormalTab FontColor: " + str(normal_tab.get_font_color()))
# Get the Background of NormalTab
print("User Theme NormalTab Background: " + str(normal_tab.get_background()))
# Get the TabTheme instance of Theme
selected_tab = theme.get_selected_tab()
# Check if selected_tab is not null
if selected_tab is not None:
# Get the FontColor of selected_tab
print("User Theme Selected Tab FontColor: " + str(selected_tab.get_font_color()))
# Get the Background of selected_tab
print("User Theme Selected Tab Background: " + str(selected_tab.get_background()))
# Get the NewBackground of each Theme
print("User Theme NewBackground: " + str(theme.get_new_background()))
# Get the Background of each Theme
print("User Theme Background: " + str(theme.get_background()))
# Get the Screen of each Theme
print("User Theme Screen: " + str(theme.get_screen()))
# Get the Type of each Theme
print("User Theme Type: " + str(theme.get_type()))
# Get the ID of each User
print("User ID: " + str(user.get_id()))
# Get the State of each User
print("User State: " + str(user.get_state()))
# Get the Fax of each User
print("User Fax: " + str(user.get_fax()))
# Get the CountryLocale of each User
print("User CountryLocale: " + str(user.get_country_locale()))
# Get the FirstName of each User
print("User FirstName: " + str(user.get_first_name()))
# Get the Email of each User
print("User Email: " + str(user.get_email()))
# Get the reportingTo User instance of each User
reporting_to = user.get_reporting_to()
# Check if reporting_to is not None
if reporting_to is not None:
# Get the Name of the reporting_to User
print("User ReportingTo User-Name: " + str(reporting_to.get_name()))
# Get the ID of the reporting_to User
print("User ReportingTo User-ID: " + str(reporting_to.get_id()))
# Get the DecimalSeparator of each User
print("User DecimalSeparator: " + str(user.get_decimal_separator()))
# Get the Zip of each User
print("User Zip: " + str(user.get_zip()))
# Get the CreatedTime of each User
print("User CreatedTime: " + str(user.get_created_time()))
# Get the Website of each User
print("User Website: " + str(user.get_website()))
if user.get_modified_time() is not None:
# Get the ModifiedTime of each User
print("User ModifiedTime: " + str(user.get_modified_time()))
# Get the TimeFormat of each User
print("User TimeFormat: " + str(user.get_time_format()))
# Get the Offset of each User
print("User Offset: " + str(user.get_offset()))
# Get the Profile instance of each User
profile = user.get_profile()
# Check if profile is not None
if profile is not None:
# Get the Name of the profile
print("User Profile Name: " + str(profile.get_name()))
# Get the ID of the profile
print("User Profile ID: " + str(profile.get_id()))
# Get the Mobile of each User
print("User Mobile: " + str(user.get_mobile()))
# Get the LastName of each User
print("User LastName: " + str(user.get_last_name()))
# Get the TimeZone of each User
print("User TimeZone: " + str(user.get_time_zone()))
# Get the Custom Fields, if any
print("Custom Field: " + str(user.get_key_value('Custom_Field')))
# Get the created_by User instance of each User
created_by = user.get_created_by()
# Check if created_by is not None
if created_by is not None:
# Get the Name of the created_by User
print("User Created By User-Name: " + str(created_by.get_name()))
# Get the ID of the created_by User
print("User Created By User-ID: " + str(created_by.get_id()))
# Get the Zuid of each User
print("User Zuid: " + str(user.get_zuid()))
# Get the Confirm of each User
print("User Confirm: " + str(user.get_confirm()))
# Get the FullName of each User
print("User FullName: " + str(user.get_full_name()))
# Get the list of obtained Territory instances
territories = user.get_territories()
# Check if territories is not None
if territories is not None:
for territory in territories:
# Get the Manager of the Territory
print("User Territory Manager: " + str(territory.get_manager()))
# Get the Name of the Territory
print("User Territory Name: " + str(territory.get_name()))
# Get the ID of the Territory
print("User Territory ID: " + str(territory.get_id()))
# Get the Phone of each User
print("User Phone: " + str(user.get_phone()))
# Get the DOB of each User
print("User DOB: " + str(user.get_dob()))
# Get the DateFormat of each User
print("User DateFormat: " + str(user.get_date_format()))
# Get the Status of each User
print("User Status: " + str(user.get_status()))
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def update_users():
    """
    This method is used to update the details of multiple users of your organization and print the response.
    """

    def _print_response(resp):
        # SuccessResponse and APIException expose identical accessors, so one
        # printer covers every branch; output matches the API sample format.
        print("Status: " + resp.get_status().get_value())
        print("Code: " + resp.get_code().get_value())
        print("Details")
        for detail_key, detail_value in resp.get_details().items():
            print(detail_key + ' : ' + str(detail_value))
        print("Message: " + resp.get_message().get_value())

    # Operations instance that performs the actual API call.
    users_operations = UsersOperations()
    # Request body wrapper carrying the users to update.
    request = BodyWrapper()

    # First user: assign a new role and locale.
    first_user = ZCRMUser()
    first_user.set_id(3477061000011244009)
    first_role = Role()
    first_role.set_id(3477061000000026008)
    first_user.set_role(first_role)
    first_user.set_country_locale('en_US')

    # Second user: assign a new role and locale.
    second_user = ZCRMUser()
    second_user.set_id(3409643000000302042)
    second_role = Role()
    second_role.set_id(3409643000000026008)
    second_user.set_role(second_role)
    second_user.set_country_locale('en_US')

    request.set_users([first_user, second_user])

    # Fire the bulk update and print whatever came back.
    response = users_operations.update_users(request)
    if response is not None:
        print('Status Code: ' + str(response.get_status_code()))
        response_object = response.get_object()
        if response_object is not None:
            if isinstance(response_object, ActionWrapper):
                # One ActionResponse per submitted user.
                for action_response in response_object.get_users():
                    # Success and failure entries are printed identically.
                    if isinstance(action_response, (SuccessResponse, APIException)):
                        _print_response(action_response)
            elif isinstance(response_object, APIException):
                # The request as a whole was rejected.
                _print_response(response_object)
@staticmethod
def update_user(user_id):
    """
    This method is used to update the details of any specific user with ID.

    :param user_id: The ID of the User to be updated (example: 3409643000000302031)
    """

    def _print_response(resp):
        # SuccessResponse and APIException expose identical accessors, so one
        # printer covers every branch; output matches the API sample format.
        print("Status: " + resp.get_status().get_value())
        print("Code: " + resp.get_code().get_value())
        print("Details")
        for detail_key, detail_value in resp.get_details().items():
            print(detail_key + ' : ' + str(detail_value))
        print("Message: " + resp.get_message().get_value())

    # Operations instance that performs the actual API call.
    users_operations = UsersOperations()
    # Request body wrapper; the target user's ID travels in the URL,
    # so the ZCRMUser instance here only carries the fields to change.
    request = BodyWrapper()
    updated_user = ZCRMUser()
    new_role = Role()
    new_role.set_id(3477061000000026008)
    updated_user.set_role(new_role)
    request.set_users([updated_user])

    # Fire the update and print whatever came back.
    response = users_operations.update_user(user_id, request)
    if response is not None:
        print('Status Code: ' + str(response.get_status_code()))
        response_object = response.get_object()
        if response_object is not None:
            if isinstance(response_object, ActionWrapper):
                for action_response in response_object.get_users():
                    # Success and failure entries are printed identically.
                    if isinstance(action_response, (SuccessResponse, APIException)):
                        _print_response(action_response)
            elif isinstance(response_object, APIException):
                # The request as a whole was rejected.
                _print_response(response_object)
@staticmethod
def delete_user(user_id):
    """
    This method is used to delete a user from your organization and print the response.

    :param user_id: The ID of the User to be deleted (example: 3409643000000302031)
    """

    def _print_response(resp):
        # SuccessResponse and APIException expose identical accessors, so one
        # printer covers every branch; output matches the API sample format.
        print("Status: " + resp.get_status().get_value())
        print("Code: " + resp.get_code().get_value())
        print("Details")
        for detail_key, detail_value in resp.get_details().items():
            print(detail_key + ' : ' + str(detail_value))
        print("Message: " + resp.get_message().get_value())

    # Deletion needs no request body — only the target user's ID.
    users_operations = UsersOperations()
    response = users_operations.delete_user(user_id)
    if response is not None:
        print('Status Code: ' + str(response.get_status_code()))
        response_object = response.get_object()
        if response_object is not None:
            if isinstance(response_object, ActionWrapper):
                for action_response in response_object.get_users():
                    # Success and failure entries are printed identically.
                    if isinstance(action_response, (SuccessResponse, APIException)):
                        _print_response(action_response)
            elif isinstance(response_object, APIException):
                # The request as a whole was rejected.
                _print_response(response_object)
@staticmethod
def create_user():
    """
    This method is used to add a user to your organization and print the response.
    """

    def _print_response(resp):
        # SuccessResponse and APIException expose identical accessors, so one
        # printer covers every branch; output matches the API sample format.
        print("Status: " + resp.get_status().get_value())
        print("Code: " + resp.get_code().get_value())
        print("Details")
        for detail_key, detail_value in resp.get_details().items():
            print(detail_key + ' : ' + str(detail_value))
        print("Message: " + resp.get_message().get_value())

    # Operations instance that performs the actual API call.
    users_operations = UsersOperations()
    # Creation uses RequestWrapper (unlike update, which uses BodyWrapper).
    request = RequestWrapper()

    # Populate the new user's identity, role and profile.
    new_user = ZCRMUser()
    new_role = Role()
    new_role.set_id(3477061000000026008)
    new_user.set_role(new_role)
    new_user.set_country_locale('en_US')
    new_user.set_first_name('Test')
    new_user.set_last_name('User')
    new_user.set_email('testuser@zoho.com')
    new_profile = Profile()
    new_profile.set_id(3477061000000026014)
    new_user.set_profile(new_profile)
    request.set_users([new_user])

    # Fire the create and print whatever came back.
    response = users_operations.create_user(request)
    if response is not None:
        print('Status Code: ' + str(response.get_status_code()))
        response_object = response.get_object()
        if response_object is not None:
            if isinstance(response_object, ActionWrapper):
                for action_response in response_object.get_users():
                    # Success and failure entries are printed identically.
                    if isinstance(action_response, (SuccessResponse, APIException)):
                        _print_response(action_response)
            elif isinstance(response_object, APIException):
                # The request as a whole was rejected.
                _print_response(response_object)
| 41.523976
| 123
| 0.490867
| 4,787
| 47,628
| 4.733236
| 0.050554
| 0.058787
| 0.043252
| 0.05561
| 0.938521
| 0.931062
| 0.914865
| 0.907582
| 0.899903
| 0.889311
| 0
| 0.009746
| 0.437705
| 47,628
| 1,146
| 124
| 41.560209
| 0.836302
| 0.221487
| 0
| 0.860731
| 0
| 0
| 0.090277
| 0.001374
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0
| 0.013699
| 0
| 0.034247
| 0.479452
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
486466db68cbcf5eea5ccc9b96dfe6e362a5ef05
| 1,939
|
py
|
Python
|
authnz/tests.py
|
NERSC/newt-2.0
|
9b16d61b3bb6b03d731573fc11c85b1527e8a40a
|
[
"BSD-2-Clause"
] | 11
|
2015-04-15T02:09:51.000Z
|
2022-03-06T15:54:36.000Z
|
authnz/tests.py
|
shreddd/newt-2.0
|
8ef448ac1a6f34c2f16b0deafdcdc27ebfc255ae
|
[
"BSD-2-Clause"
] | 21
|
2019-06-06T17:54:09.000Z
|
2021-07-14T05:57:14.000Z
|
authnz/tests.py
|
shreddd/newt-2.0
|
8ef448ac1a6f34c2f16b0deafdcdc27ebfc255ae
|
[
"BSD-2-Clause"
] | 3
|
2015-08-27T00:42:43.000Z
|
2019-04-04T23:10:18.000Z
|
from django.test import TestCase
from django.conf import settings
import json
from newt.tests import MyTestClient, newt_base_url, login
class AuthTests(TestCase):
    """Exercise the /auth endpoint: unauthenticated status, login, and logout.

    Uses ``assertEqual`` throughout — the ``assertEquals`` alias in the original
    is deprecated and was removed in Python 3.12.
    """

    fixtures = ["test_fixture.json"]

    def setUp(self):
        # Fresh client per test so sessions never leak between tests.
        self.client = MyTestClient()

    def test_login(self):
        """GET shows logged-out, POST logs in, subsequent GET shows user info."""
        # Should not be logged in
        r = self.client.get(newt_base_url + "/auth")
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], False)

        # Should be logged in
        r = self.client.post(newt_base_url + "/auth", data=login)
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], True)
        self.assertEqual(json_response['output']['username'], login['username'])

        # Logged-in client should return user info
        r = self.client.get(newt_base_url + "/auth")
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], True)
        self.assertEqual(json_response['output']['username'], login['username'])

    def test_logout(self):
        """DELETE on /auth ends the session; subsequent GET shows logged-out."""
        # Should be logged in
        r = self.client.post(newt_base_url + "/auth", data=login)
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], True)
        self.assertEqual(json_response['output']['username'], login['username'])

        # Log out and confirm the response reports auth == False
        r = self.client.delete(newt_base_url + "/auth")
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], False)

        # A fresh GET must also report logged-out
        r = self.client.get(newt_base_url + "/auth")
        self.assertEqual(r.status_code, 200)
        json_response = r.json()
        self.assertEqual(json_response['output']['auth'], False)
| 37.288462
| 81
| 0.645178
| 241
| 1,939
| 5.033195
| 0.195021
| 0.197857
| 0.148392
| 0.207749
| 0.761748
| 0.761748
| 0.753504
| 0.753504
| 0.753504
| 0.753504
| 0
| 0.011881
| 0.218669
| 1,939
| 51
| 82
| 38.019608
| 0.788779
| 0.056215
| 0
| 0.702703
| 0
| 0
| 0.094795
| 0
| 0
| 0
| 0
| 0
| 0.405405
| 1
| 0.081081
| false
| 0
| 0.108108
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
48672fefcb2e4beea8dc535553ddf7aeb833ebbf
| 3,065
|
py
|
Python
|
test/pyaz/sql/midb/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/sql/midb/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/sql/midb/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
def list_deleted(resource_group, managed_instance):
    """Run ``az sql midb list-deleted`` and return its parsed JSON output.

    :param resource_group: name of the Azure resource group
    :param managed_instance: name of the managed SQL instance
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb list-deleted " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def create(name, managed_instance, resource_group, collation=None, no_wait=None):
    """Run ``az sql midb create`` and return its parsed JSON output.

    Fixed: the original signature placed required parameters after
    ``collation=None``, which is a SyntaxError — required parameters now
    come first (the module could not even be imported before, so no
    existing caller is broken).

    :param name: name of the managed database to create
    :param managed_instance: name of the managed SQL instance
    :param resource_group: name of the Azure resource group
    :param collation: optional collation for the new database
    :param no_wait: optional flag to return without waiting for completion
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb create " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def restore(dest_name, time, name, managed_instance, resource_group,
            deleted_time=None, dest_mi=None, dest_resource_group=None, no_wait=None):
    """Run ``az sql midb restore`` and return its parsed JSON output.

    Fixed: the original signature interleaved required parameters after
    defaulted ones (``dest_name``, ``time`` etc. after ``deleted_time=None``),
    which is a SyntaxError — required parameters now come first (the module
    could not even be imported before, so no existing caller is broken).

    :param dest_name: name of the restored (destination) database
    :param time: point in time to restore to
    :param name: name of the source managed database
    :param managed_instance: name of the source managed SQL instance
    :param resource_group: name of the source resource group
    :param deleted_time: optional deletion time, for restoring a deleted database
    :param dest_mi: optional destination managed instance
    :param dest_resource_group: optional destination resource group
    :param no_wait: optional flag to return without waiting for completion
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb restore " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def show(resource_group, managed_instance, name):
    """Run ``az sql midb show`` and return its parsed JSON output.

    :param resource_group: name of the Azure resource group
    :param managed_instance: name of the managed SQL instance
    :param name: name of the managed database to show
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb show " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def list(resource_group, managed_instance):  # noqa: A001 - name mirrors the az CLI verb; kept for API compatibility despite shadowing the builtin
    """Run ``az sql midb list`` and return its parsed JSON output.

    :param resource_group: name of the Azure resource group
    :param managed_instance: name of the managed SQL instance
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb list " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
def delete(resource_group, managed_instance, name, yes=None, no_wait=None):
    """Run ``az sql midb delete`` and return its parsed JSON output.

    :param resource_group: name of the Azure resource group
    :param managed_instance: name of the managed SQL instance
    :param name: name of the managed database to delete
    :param yes: optional flag to skip the confirmation prompt
    :param no_wait: optional flag to return without waiting for completion
    :return: the CLI's stdout parsed with ``json.loads``
    :raises Exception: carrying the CLI's stderr when no stdout was produced
    """
    params = get_params(locals())
    command = "az sql midb delete " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone;
    # prefer subprocess.run([...], shell=False) if arguments can be untrusted.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    # Fixed: the original had unreachable print() calls after return/raise.
    raise Exception(stderr)
| 34.829545
| 142
| 0.665905
| 380
| 3,065
| 5.289474
| 0.134211
| 0.083582
| 0.059701
| 0.062687
| 0.900995
| 0.837313
| 0.837313
| 0.837313
| 0.818905
| 0.818905
| 0
| 0.005015
| 0.21925
| 3,065
| 87
| 143
| 35.229885
| 0.834935
| 0
| 0
| 0.825
| 0
| 0
| 0.057749
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025
| null | null | 0.225
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6fb08411545e1be1abe5f27ff5f9080629fecd01
| 65,233
|
gyp
|
Python
|
libtgvoip.gyp
|
seven1240/libtgvoip
|
78decc81bf25cf36ad1b4a9398aa11cb195db9c5
|
[
"Unlicense"
] | 3
|
2019-09-05T09:42:24.000Z
|
2022-02-22T18:46:00.000Z
|
libtgvoip.gyp
|
seven1240/libtgvoip
|
78decc81bf25cf36ad1b4a9398aa11cb195db9c5
|
[
"Unlicense"
] | null | null | null |
libtgvoip.gyp
|
seven1240/libtgvoip
|
78decc81bf25cf36ad1b4a9398aa11cb195db9c5
|
[
"Unlicense"
] | 1
|
2019-03-04T21:02:42.000Z
|
2019-03-04T21:02:42.000Z
|
# GYP project file for TDesktop
{
'targets': [
{
'target_name': 'libtgvoip',
'type': 'static_library',
'dependencies': [],
'defines': [
'WEBRTC_APM_DEBUG_DUMP=0',
'TGVOIP_USE_DESKTOP_DSP',
'WEBRTC_NS_FLOAT',
],
'variables': {
'tgvoip_src_loc': '.',
'official_build_target%': '',
'linux_path_opus_include%': '<(DEPTH)/../../../Libraries/opus/include',
},
'include_dirs': [
'<(tgvoip_src_loc)/webrtc_dsp',
'<(linux_path_opus_include)',
],
'direct_dependent_settings': {
'include_dirs': [
'<(tgvoip_src_loc)',
],
},
'export_dependent_settings': [],
'sources': [
'<(tgvoip_src_loc)/BlockingQueue.cpp',
'<(tgvoip_src_loc)/BlockingQueue.h',
'<(tgvoip_src_loc)/Buffers.cpp',
'<(tgvoip_src_loc)/Buffers.h',
'<(tgvoip_src_loc)/CongestionControl.cpp',
'<(tgvoip_src_loc)/CongestionControl.h',
'<(tgvoip_src_loc)/EchoCanceller.cpp',
'<(tgvoip_src_loc)/EchoCanceller.h',
'<(tgvoip_src_loc)/JitterBuffer.cpp',
'<(tgvoip_src_loc)/JitterBuffer.h',
'<(tgvoip_src_loc)/logging.cpp',
'<(tgvoip_src_loc)/logging.h',
'<(tgvoip_src_loc)/MediaStreamItf.cpp',
'<(tgvoip_src_loc)/MediaStreamItf.h',
'<(tgvoip_src_loc)/OpusDecoder.cpp',
'<(tgvoip_src_loc)/OpusDecoder.h',
'<(tgvoip_src_loc)/OpusEncoder.cpp',
'<(tgvoip_src_loc)/OpusEncoder.h',
'<(tgvoip_src_loc)/threading.h',
'<(tgvoip_src_loc)/VoIPController.cpp',
'<(tgvoip_src_loc)/VoIPGroupController.cpp',
'<(tgvoip_src_loc)/VoIPController.h',
'<(tgvoip_src_loc)/PrivateDefines.h',
'<(tgvoip_src_loc)/VoIPServerConfig.cpp',
'<(tgvoip_src_loc)/VoIPServerConfig.h',
'<(tgvoip_src_loc)/audio/AudioInput.cpp',
'<(tgvoip_src_loc)/audio/AudioInput.h',
'<(tgvoip_src_loc)/audio/AudioOutput.cpp',
'<(tgvoip_src_loc)/audio/AudioOutput.h',
'<(tgvoip_src_loc)/audio/Resampler.cpp',
'<(tgvoip_src_loc)/audio/Resampler.h',
'<(tgvoip_src_loc)/NetworkSocket.cpp',
'<(tgvoip_src_loc)/NetworkSocket.h',
'<(tgvoip_src_loc)/PacketReassembler.cpp',
'<(tgvoip_src_loc)/PacketReassembler.h',
'<(tgvoip_src_loc)/MessageThread.cpp',
'<(tgvoip_src_loc)/MessageThread.h',
'<(tgvoip_src_loc)/audio/AudioIO.cpp',
'<(tgvoip_src_loc)/audio/AudioIO.h',
'<(tgvoip_src_loc)/video/ScreamCongestionController.cpp',
'<(tgvoip_src_loc)/video/ScreamCongestionController.h',
'<(tgvoip_src_loc)/video/VideoSource.cpp',
'<(tgvoip_src_loc)/video/VideoSource.h',
'<(tgvoip_src_loc)/video/VideoRenderer.cpp',
'<(tgvoip_src_loc)/video/VideoRenderer.h',
'<(tgvoip_src_loc)/video/VideoPacketSender.cpp',
'<(tgvoip_src_loc)/video/VideoPacketSender.h',
'<(tgvoip_src_loc)/video/VideoFEC.cpp',
'<(tgvoip_src_loc)/video/VideoFEC.h',
'<(tgvoip_src_loc)/json11.cpp',
'<(tgvoip_src_loc)/json11.hpp',
# Windows
'<(tgvoip_src_loc)/os/windows/NetworkSocketWinsock.cpp',
'<(tgvoip_src_loc)/os/windows/NetworkSocketWinsock.h',
'<(tgvoip_src_loc)/os/windows/AudioInputWave.cpp',
'<(tgvoip_src_loc)/os/windows/AudioInputWave.h',
'<(tgvoip_src_loc)/os/windows/AudioOutputWave.cpp',
'<(tgvoip_src_loc)/os/windows/AudioOutputWave.h',
'<(tgvoip_src_loc)/os/windows/AudioOutputWASAPI.cpp',
'<(tgvoip_src_loc)/os/windows/AudioOutputWASAPI.h',
'<(tgvoip_src_loc)/os/windows/AudioInputWASAPI.cpp',
'<(tgvoip_src_loc)/os/windows/AudioInputWASAPI.h',
'<(tgvoip_src_loc)/os/windows/WindowsSpecific.cpp',
'<(tgvoip_src_loc)/os/windows/WindowsSpecific.h',
# macOS
'<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnit.cpp',
'<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnit.h',
'<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnit.cpp',
'<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnit.h',
'<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnitOSX.cpp',
'<(tgvoip_src_loc)/os/darwin/AudioInputAudioUnitOSX.h',
'<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnitOSX.cpp',
'<(tgvoip_src_loc)/os/darwin/AudioOutputAudioUnitOSX.h',
'<(tgvoip_src_loc)/os/darwin/AudioUnitIO.cpp',
'<(tgvoip_src_loc)/os/darwin/AudioUnitIO.h',
'<(tgvoip_src_loc)/os/darwin/DarwinSpecific.mm',
'<(tgvoip_src_loc)/os/darwin/DarwinSpecific.h',
# Linux
'<(tgvoip_src_loc)/os/linux/AudioInputALSA.cpp',
'<(tgvoip_src_loc)/os/linux/AudioInputALSA.h',
'<(tgvoip_src_loc)/os/linux/AudioOutputALSA.cpp',
'<(tgvoip_src_loc)/os/linux/AudioOutputALSA.h',
'<(tgvoip_src_loc)/os/linux/AudioOutputPulse.cpp',
'<(tgvoip_src_loc)/os/linux/AudioOutputPulse.h',
'<(tgvoip_src_loc)/os/linux/AudioInputPulse.cpp',
'<(tgvoip_src_loc)/os/linux/AudioInputPulse.h',
'<(tgvoip_src_loc)/os/linux/AudioPulse.cpp',
'<(tgvoip_src_loc)/os/linux/AudioPulse.h',
# POSIX
'<(tgvoip_src_loc)/os/posix/NetworkSocketPosix.cpp',
'<(tgvoip_src_loc)/os/posix/NetworkSocketPosix.h',
# WebRTC APM
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/field_trial.h',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/cpu_features_wrapper.h',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/asm_defines.h',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/metrics.h',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/include/compile_assert_c.h',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/field_trial.cc',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/metrics.cc',
'<(tgvoip_src_loc)/webrtc_dsp/system_wrappers/source/cpu_features.cc',
'<(tgvoip_src_loc)/webrtc_dsp/typedefs.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/internal/memutil.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/internal/memutil.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/string_view.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/ascii.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/ascii.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/strings/string_view.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/types/optional.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/types/bad_optional_access.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/types/bad_optional_access.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/types/optional.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/memory/memory.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/meta/type_traits.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/algorithm/algorithm.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/container/inlined_vector.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/policy_checks.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/port.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/config.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/raw_logging.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/throw_delegate.cc',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/invoke.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/inline_variable.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/atomic_hook.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/identity.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/raw_logging.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/internal/throw_delegate.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/attributes.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/macros.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/optimization.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/base/log_severity.h',
'<(tgvoip_src_loc)/webrtc_dsp/absl/utility/utility.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/string_to_number.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/constructormagic.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/race_checker.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/strings/string_builder.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/strings/string_builder.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event_tracer.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringencode.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/memory/aligned_malloc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/memory/aligned_malloc.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/timeutils.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/ignore_wundef.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringutils.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/arraysize.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_file.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/swap_queue.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/string_to_number.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/trace_event.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/checks.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/deprecation.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/sanitizer.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/scoped_ref_ptr.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/timeutils.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/atomicops.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringencode.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/stringutils.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/checks.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_minmax.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_conversions.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_conversions_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/numerics/safe_compare.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/unused.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/inline.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/ignore_warnings.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/asm_defines.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/rtc_export.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/system/arch.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread_types.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/protobuf_utils.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_annotations.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/gtest_prod_util.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/function_view.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/criticalsection.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/criticalsection.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_thread_types.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcount.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/event_tracer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/compile_assert_c.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_webrtc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/type_traits.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/platform_file.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcounter.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/thread_checker.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/race_checker.h',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/refcountedobject.h',
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_vad_weights.cc',
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_activations.h',
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/kiss_fft.h',
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/kiss_fft.cc',
'<(tgvoip_src_loc)/webrtc_dsp/third_party/rnnoise/src/rnn_vad_weights.h',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/audio_frame.cc',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_config.h',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_control.h',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/audio_frame.h',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_config.cc',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_factory.h',
'<(tgvoip_src_loc)/webrtc_dsp/api/audio/echo_canceller3_factory.cc',
'<(tgvoip_src_loc)/webrtc_dsp/api/array_view.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/third_party/fft/fft.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/third_party/fft/fft.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/bandwidth_info.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/include/isac.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filterbanks.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/entropy_coding.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_vad.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/settings.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/transform.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/crc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_filter.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filter_functions.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/decode.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lattice.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/intialize.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_float_type.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/codec.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/entropy_coding.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac_vad.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/structs.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/filter_functions.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/arith_routines.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/crc.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/decode_bwe.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/isac.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_coding/codecs/isac/main/source/lpc_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/rms_level.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/moving_max.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/circular_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/normalized_covariance_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/normalized_covariance_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/moving_max.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/circular_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/mean_variance_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_detector/mean_variance_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_for_experimental_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/splitting_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/rms_level.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/ns_core.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression_x.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core_c.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/defines.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/ns_core.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/windows_private.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression_x.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/noise_suppression.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_defines.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/residual_echo_detector.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_processing_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/typing_detection.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/render_queue_item_verifier.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/config.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_frame_view.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/mock_audio_processing.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/gain_control.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator_factory.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing_statistics.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_generator_factory.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/aec_dump.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/aec_dump.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing_statistics.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/audio_processing.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/include/config.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/interpolated_gain_curve.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/biquad_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/interpolated_gain_curve.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_common.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_testing_common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/gain_applier.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/signal_classifier.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_agc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_digital_gain_applier.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/saturation_protector.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vector_float_frame.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/rnn.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/rnn.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/test_utils.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_info.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/lp_residual.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/ring_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/features_extraction.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/fft_util.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/spectral_features.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/pitch_search.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/features_extraction.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/fft_util.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/rnn_vad/lp_residual.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_gain_controller.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vector_float_frame.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/down_sampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_level_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_testing_common.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_digital_level_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_gain_controller.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/saturation_protector.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vad_with_level.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter_db_gain_curve.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/agc2_common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_digital_gain_applier.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/vad_with_level.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter_db_gain_curve.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/fixed_digital_level_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/gain_applier.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/down_sampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_level_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/signal_classifier.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_spectrum_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/compute_interpolated_gain_curve.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/compute_interpolated_gain_curve.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/biquad_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/noise_spectrum_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/limiter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc2/adaptive_mode_level_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/moving_moments.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_detector.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_tree.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_suppressor.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_node.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/moving_moments.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_tree.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/wpd_node.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_suppressor.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/transient_detector.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/transient/dyadic_decimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/low_cut_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/noise_suppression_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/level_estimator_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/three_band_filter_bank.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/echo_cancellation.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_resampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_resampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/echo_cancellation.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_optimized_methods.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_sse2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/voice_detection_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/voice_detection_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_cancellation_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_for_experimental_agc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/loudness_histogram.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc_manager_direct.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/analog_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/gain_control.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/digital_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/analog_agc.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/legacy/digital_agc.c',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/utility.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/mock_agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/loudness_histogram.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/gain_map_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/utility.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc_manager_direct.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/agc/agc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_processing_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_control_mobile_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/splitting_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/low_cut_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_generator/file_audio_generator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/audio_generator/file_audio_generator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_controller2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/three_band_filter_bank.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/residual_echo_detector.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_cancellation_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/noise_suppression_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/level_estimator_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_controller2.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_defines.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core_c.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/echo_control_mobile.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/echo_control_mobile.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_reverb_model.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/downsampled_render_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output_analyzer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_fallback.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/residual_echo_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/shadow_filter_update_gain.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover_metrics.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec_state.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_variability.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/frame_blocker.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_delay_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/adaptive_fir_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/cascaded_biquad_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_signal_analyzer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_fft.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_fft.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover_metrics.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fullband_erle_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/filter_analyzer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_delay_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subband_erle_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller_metrics.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor_metrics.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erl_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec_state.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/adaptive_fir_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_data.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/skew_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller_metrics.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/comfort_noise_generator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_delay_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erl_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_framer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erle_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/cascaded_biquad_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matrix_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/stationarity_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_signal_analyzer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_path_variability.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/moving_average.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_reverb_model.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subtractor_output_analyzer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_audibility.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor_metrics.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/moving_average.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/erle_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/subband_erle_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_common.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/residual_echo_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fullband_erle_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/stationarity_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_canceller3.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/skew_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_decay_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_controller2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain_limiter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/main_filter_update_gain.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_remover.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model_fallback.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/downsampled_render_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matrix_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_frequency_response.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_audibility.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_processor2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/echo_canceller3.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_delay_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/aec3_common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/fft_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/vector_math.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/decimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/frame_blocker.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/block_framer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/suppression_gain_limiter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/delay_estimate.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/comfort_noise_generator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_model.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/main_filter_update_gain.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/matched_filter_lag_aggregator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/shadow_filter_update_gain.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/filter_analyzer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_decay_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/reverb_frequency_response.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/decimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec3/render_delay_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/echo_control_mobile_impl.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/gain_control_impl.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/typing_detection.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/logging/apm_data_dumper.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/logging/apm_data_dumper.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_activity_detector.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/standalone_vad.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_internal.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_circular_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_circular_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_based_vad.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pole_zero_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pole_zero_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_based_vad.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/gmm.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/vad_audio_proc.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_gmm_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/noise_gmm_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/pitch_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/gmm.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/standalone_vad.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/vad/voice_activity_detector.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_wrapper.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_sse2.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/block_mean_calculator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/block_mean_calculator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_tables_common.h',
'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/delay_estimator_wrapper.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/mocks/mock_smoothing_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_file.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/window_generator.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/channel_buffer.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_factory.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/sparse_fir_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_sse.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/window_generator.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/ring_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/include/audio_util.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_header.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier_ooura.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_util.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier_ooura.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_sse.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/smoothing_filter.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_sinc_resampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/resampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler_sse.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/include/push_resampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/include/resampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_sinc_resampler.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/push_resampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinusoidal_linear_chirp_source.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinusoidal_linear_chirp_source.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_factory.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_converter.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_file.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/fft4g/fft4g.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/fft4g/fft4g.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/audio_converter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/channel_buffer.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/real_fourier.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/sparse_fir_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/smoothing_filter.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_c.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/ring_buffer.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_c.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_fft_tables.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_fft.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ma_fast_q12.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/levinson_durbin.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/dot_product_with_scale.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/auto_corr_to_refl_coef.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2_internal.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/energy.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/downsample_fast.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/splitting_filter1.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar_fast_q12.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_init.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/lpc_to_refl_coef.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/cross_correlation.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/signal_processing_library.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/real_fft.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/spl_inl.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/division_operations.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/auto_correlation.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/get_scaling_square.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/dot_product_with_scale.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2_internal.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/min_max_operations.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/refl_coef_to_lpc.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/vector_scaling_operations.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_fractional.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/real_fft.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/ilbc_specific_functions.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_bit_reverse.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/randomization_functions.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/copy_set_operations.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_by_2.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/get_hanning_window.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/resample_48khz.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_inl.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/spl_sqrt.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/wav_header.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_sp.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad.cc',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/webrtc_vad.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_core.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/include/vad.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/include/webrtc_vad.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_gmm.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_filterbank.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_core.c',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_sp.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_filterbank.h',
'<(tgvoip_src_loc)/webrtc_dsp/common_audio/vad/vad_gmm.c',
# ARM/NEON sources
# TODO check if there's a good way to make these compile with ARM ports of TDesktop
#'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/ns/nsx_core_neon.c',
#'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aec/aec_core_neon.cc',
#'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/aecm/aecm_core_neon.cc',
#'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h',
#'<(tgvoip_src_loc)/webrtc_dsp/modules/audio_processing/utility/ooura_fft_neon.cc',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_neon.cc',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/resampler/sinc_resampler_neon.cc',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor_arm.S',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/fir_filter_neon.h',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/downsample_fast_neon.c',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/complex_bit_reverse_arm.S',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/include/spl_inl_armv7.h',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/min_max_operations_neon.c',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/cross_correlation_neon.c',
#'<(tgvoip_src_loc)/webrtc_dsp/common_audio/signal_processing/filter_ar_fast_q12_armv7.S',
],
'libraries': [],
'configurations': {
'Debug': {},
'Release': {},
},
'conditions': [
[
'"<(OS)" != "win"', {
'sources/': [['exclude', '<(tgvoip_src_loc)/os/windows/']],
}, {
'sources/': [['exclude', '<(tgvoip_src_loc)/os/posix/']],
},
],
[
'"<(OS)" != "mac"', {
'sources/': [['exclude', '<(tgvoip_src_loc)/os/darwin/']],
},
],
[
'"<(OS)" != "linux"', {
'sources/': [['exclude', '<(tgvoip_src_loc)/os/linux/']],
},
],
[
'"<(OS)" == "mac"', {
'xcode_settings': {
'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',
'ALWAYS_SEARCH_USER_PATHS': 'NO',
},
'defines': [
'WEBRTC_POSIX',
'WEBRTC_MAC',
'TARGET_OS_OSX',
],
'sources': [
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.mm',
'<(tgvoip_src_loc)/webrtc_dsp/rtc_base/logging_mac.h',
],
'conditions': [
[ '"<(official_build_target)" == "mac32"', {
'xcode_settings': {
'MACOSX_DEPLOYMENT_TARGET': '10.6',
'OTHER_CPLUSPLUSFLAGS': [ '-nostdinc++' ],
},
'include_dirs': [
'/usr/local/macold/include/c++/v1',
'<(DEPTH)/../../../Libraries/macold/openssl/include',
],
'defines': [
'TARGET_OSX32',
],
}, {
'xcode_settings': {
'MACOSX_DEPLOYMENT_TARGET': '10.8',
'CLANG_CXX_LIBRARY': 'libc++',
},
'include_dirs': [
'<(DEPTH)/../../../Libraries/openssl/include',
],
'direct_dependent_settings': {
'linkflags': [
'-framework VideoToolbox',
],
},
'sources': [
'<(tgvoip_src_loc)/os/darwin/TGVVideoRenderer.mm',
'<(tgvoip_src_loc)/os/darwin/TGVVideoRenderer.h',
'<(tgvoip_src_loc)/os/darwin/TGVVideoSource.mm',
'<(tgvoip_src_loc)/os/darwin/TGVVideoSource.h',
'<(tgvoip_src_loc)/os/darwin/VideoToolboxEncoderSource.mm',
'<(tgvoip_src_loc)/os/darwin/VideoToolboxEncoderSource.h',
'<(tgvoip_src_loc)/os/darwin/SampleBufferDisplayLayerRenderer.mm',
'<(tgvoip_src_loc)/os/darwin/SampleBufferDisplayLayerRenderer.h',
],
}],
['"<(official_build_target)" == "macstore"', {
'defines': [
'TGVOIP_NO_OSX_PRIVATE_API',
],
}],
],
},
],
[
'"<(OS)" == "win"', {
'msbuild_toolset': 'v141',
'defines': [
'NOMINMAX',
'_USING_V110_SDK71_',
'TGVOIP_WINXP_COMPAT',
'WEBRTC_WIN',
],
'libraries': [
'winmm',
'ws2_32',
'kernel32',
'user32',
],
'msvs_cygwin_shell': 0,
'msvs_settings': {
'VCCLCompilerTool': {
'ProgramDataBaseFileName': '$(OutDir)\\$(ProjectName).pdb',
'DebugInformationFormat': '3', # Program Database (/Zi)
'AdditionalOptions': [
'/MP', # Enable multi process build.
'/EHsc', # Catch C++ exceptions only, extern C functions never throw a C++ exception.
'/wd4068', # Disable "warning C4068: unknown pragma"
],
'TreatWChar_tAsBuiltInType': 'false',
},
},
'msvs_external_builder_build_cmd': [
'ninja.exe',
'-C',
'$(OutDir)',
'-k0',
'$(ProjectName)',
],
'configurations': {
'Debug': {
'defines': [
'_DEBUG',
],
'include_dirs': [
'<(DEPTH)/../../../Libraries/openssl/Debug/include',
],
'msvs_settings': {
'VCCLCompilerTool': {
'Optimization': '0', # Disabled (/Od)
'RuntimeLibrary': '1', # Multi-threaded Debug (/MTd)
'RuntimeTypeInfo': 'true',
},
'VCLibrarianTool': {
'AdditionalOptions': [
'/NODEFAULTLIB:LIBCMT'
]
}
},
},
'Release': {
'defines': [
'NDEBUG',
],
'include_dirs': [
'<(DEPTH)/../../../Libraries/openssl/Release/include',
],
'msvs_settings': {
'VCCLCompilerTool': {
'Optimization': '2', # Maximize Speed (/O2)
'InlineFunctionExpansion': '2', # Any suitable (/Ob2)
'EnableIntrinsicFunctions': 'true', # Yes (/Oi)
'FavorSizeOrSpeed': '1', # Favor fast code (/Ot)
'RuntimeLibrary': '0', # Multi-threaded (/MT)
'EnableEnhancedInstructionSet': '2', # Streaming SIMD Extensions 2 (/arch:SSE2)
'WholeProgramOptimization': 'true', # /GL
},
'VCLibrarianTool': {
'AdditionalOptions': [
'/LTCG',
]
},
},
},
},
},
],
[
'"<(OS)" == "linux"', {
'defines': [
'WEBRTC_POSIX',
'WEBRTC_LINUX',
],
'conditions': [
[ '"<!(uname -m)" == "i686"', {
'cflags_cc': [
'-msse2',
],
}]
],
'direct_dependent_settings': {
'libraries': [
],
},
},
],
],
},
],
}
| 70.828447
| 115
| 0.686953
| 8,316
| 65,233
| 4.960197
| 0.072751
| 0.153167
| 0.204223
| 0.262261
| 0.910107
| 0.889015
| 0.842881
| 0.836335
| 0.807389
| 0.765425
| 0
| 0.005242
| 0.175264
| 65,233
| 920
| 116
| 70.905435
| 0.761468
| 0.026612
| 0
| 0.092446
| 0
| 0
| 0.777718
| 0.75739
| 0
| 0
| 0
| 0.001087
| 0.002255
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
6fea1631be61066700f0c3848bab3df9b59a11bb
| 3,047
|
py
|
Python
|
seated/__init__.py
|
matteoferla/EasyJet_seat_message
|
70666cd1bb9f0752a3ceb0d910b9f49b63d2c420
|
[
"MIT"
] | 1
|
2021-06-04T10:21:03.000Z
|
2021-06-04T10:21:03.000Z
|
seated/__init__.py
|
matteoferla/EasyJet_seat_message
|
70666cd1bb9f0752a3ceb0d910b9f49b63d2c420
|
[
"MIT"
] | null | null | null |
seated/__init__.py
|
matteoferla/EasyJet_seat_message
|
70666cd1bb9f0752a3ceb0d910b9f49b63d2c420
|
[
"MIT"
] | null | null | null |
from .square import Square, Border, Line
pattern = [
[
Square(Border.inner, Line.square), # first row headrest and tray
Square(Border.outer, Line.BL),
Square(Border.outer, Line.BR),
Square(Border.none, Line.TR),
Square(Border.outer, Line.square),
Square(Border.outer, Line.BL),
Square(Border.inner, Line.square),
Square(Border.outer, Line.TL), # under toggle
Square(Border.none, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.none, Line.BL),
Square(Border.outer, Line.TR),
],[
Square(Border.inner, Line.square), # seocnd row
Square(Border.outer, Line.TR),
Square(Border.none, Line.BL),
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.outer, Line.TR),
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.none, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.inner, Line.TL),
Square(Border.outer, Line.TL) # end of sequence
],[
Square(Border.outer, Line.TL), # third head
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.outer, Line.BR),
Square(Border.outer, Line.BL),
Square(Border.none, Line.BR), #4th tray
Square(Border.outer, Line.square),
Square(Border.outer, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.outer, Line.BL),
Square(Border.none, Line.BL) #sequence repeats
],[
Square(Border.none, Line.TR),
Square(Border.outer, Line.square),
Square(Border.outer, Line.BL),
Square(Border.inner, Line.square),
Square(Border.outer, Line.TL),
Square(Border.none, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.none, Line.BL),
Square(Border.outer, Line.TR),
Square(Border.inner, Line.square),
Square(Border.outer, Line.BL),
Square(Border.outer, Line.BR)
], [
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.none, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.inner, Line.TL),
Square(Border.outer, Line.TL),
Square(Border.inner, Line.square),
Square(Border.outer, Line.TR),
Square(Border.none, Line.BL),
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.outer, Line.TR)
], [
Square(Border.outer, Line.square),
Square(Border.outer, Line.BR),
Square(Border.outer, Line.TR),
Square(Border.none, Line.TL),
Square(Border.outer, Line.BR),
Square(Border.none, Line.BR),
Square(Border.outer, Line.TL),
Square(Border.outer, Line.TL),
Square(Border.none, Line.TL),
Square(Border.outer, Line.BR),
Square(Border.outer, Line.BL),
Square(Border.none, Line.BR)
]
]
| 36.710843
| 72
| 0.590745
| 385
| 3,047
| 4.675325
| 0.072727
| 0.486667
| 0.396667
| 0.49
| 0.934444
| 0.901111
| 0.901111
| 0.901111
| 0.901111
| 0.872778
| 0
| 0.000443
| 0.259271
| 3,047
| 83
| 73
| 36.710843
| 0.797076
| 0.033476
| 0
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012195
| 0
| 0.012195
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
82f836ba9094e82a27a7c1ac57b1e409b89a3cad
| 99
|
py
|
Python
|
test.py
|
Aaron-Jin-Xu/DataIO
|
9713807e83e06e296c9e28014553e095014be642
|
[
"MIT"
] | 1
|
2020-04-19T22:48:44.000Z
|
2020-04-19T22:48:44.000Z
|
test.py
|
jinxu06/DataIO
|
9713807e83e06e296c9e28014553e095014be642
|
[
"MIT"
] | null | null | null |
test.py
|
jinxu06/DataIO
|
9713807e83e06e296c9e28014553e095014be642
|
[
"MIT"
] | null | null | null |
from utils.io import inspect_data_dirs
from configs import DATA_DIRS
inspect_data_dirs(DATA_DIRS)
| 19.8
| 38
| 0.868687
| 17
| 99
| 4.705882
| 0.470588
| 0.4
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10101
| 99
| 4
| 39
| 24.75
| 0.898876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d22804b8c1564f8a22d48050820c7419b0ef587b
| 472
|
py
|
Python
|
main.py
|
tgaspe/Python-Workshop
|
d9d116303ae3fe8cff0dd18b124cba998c71fa7e
|
[
"MIT"
] | null | null | null |
main.py
|
tgaspe/Python-Workshop
|
d9d116303ae3fe8cff0dd18b124cba998c71fa7e
|
[
"MIT"
] | null | null | null |
main.py
|
tgaspe/Python-Workshop
|
d9d116303ae3fe8cff0dd18b124cba998c71fa7e
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
def main_wrapper():
print("This is the start of our project. This function's name is {Main_wrapper.__name__}")
# Stuff here
#
print("this is the end of the project")
if __name__ == "__main__":
=======
def main_wrapper():
print("This is the start of our project. This function's name is {Main_wrapper.__name__}")
print("this is the end of the project")
if __name__ == "__main__":
>>>>>>> origin/main
main_wrapper()
| 23.6
| 94
| 0.633475
| 67
| 472
| 4.029851
| 0.298507
| 0.203704
| 0.162963
| 0.207407
| 0.874074
| 0.874074
| 0.874074
| 0.874074
| 0.874074
| 0.874074
| 0
| 0
| 0.222458
| 472
| 20
| 95
| 23.6
| 0.735695
| 0.021186
| 0
| 0.666667
| 0
| 0
| 0.517391
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d23ea3e9a46fdb86766bef2688e1bd7ff94ee820
| 13,688
|
py
|
Python
|
python/seldon_deploy_sdk/api/environment_api.py
|
adriangonz/seldon-deploy-sdk
|
c5504838630a87053387cec57ec2e1e7251971e2
|
[
"Apache-2.0"
] | 6
|
2021-02-18T14:37:54.000Z
|
2022-01-13T13:27:43.000Z
|
python/seldon_deploy_sdk/api/environment_api.py
|
adriangonz/seldon-deploy-sdk
|
c5504838630a87053387cec57ec2e1e7251971e2
|
[
"Apache-2.0"
] | 14
|
2021-01-04T16:32:03.000Z
|
2021-12-13T17:53:59.000Z
|
python/seldon_deploy_sdk/api/environment_api.py
|
adriangonz/seldon-deploy-sdk
|
c5504838630a87053387cec57ec2e1e7251971e2
|
[
"Apache-2.0"
] | 7
|
2021-03-17T09:05:55.000Z
|
2022-01-05T10:39:56.000Z
|
# coding: utf-8
"""
Seldon Deploy API
API to interact and manage the lifecycle of your machine learning models deployed through Seldon Deploy. # noqa: E501
OpenAPI spec version: v1alpha1
Contact: hello@seldon.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from seldon_deploy_sdk.api_client import ApiClient
class EnvironmentApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def read_cluster(self, **kwargs): # noqa: E501
"""read_cluster # noqa: E501
Read the cluster info # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ClusterInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_cluster_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.read_cluster_with_http_info(**kwargs) # noqa: E501
return data
def read_cluster_with_http_info(self, **kwargs): # noqa: E501
"""read_cluster # noqa: E501
Read the cluster info # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ClusterInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/cluster', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClusterInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_health_check(self, **kwargs): # noqa: E501
"""read_health_check # noqa: E501
Read the healthcheck # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_health_check(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_health_check_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.read_health_check_with_http_info(**kwargs) # noqa: E501
return data
def read_health_check_with_http_info(self, **kwargs): # noqa: E501
"""read_health_check # noqa: E501
Read the healthcheck # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_health_check_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_health_check" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/healthcheck', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HealthCheckInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_user(self, **kwargs): # noqa: E501
"""read_user # noqa: E501
Read the request user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_user(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_user_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.read_user_with_http_info(**kwargs) # noqa: E501
return data
def read_user_with_http_info(self, **kwargs): # noqa: E501
"""read_user # noqa: E501
Read the request user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_user_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: UserInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_user" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/user', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_version(self, **kwargs): # noqa: E501
"""read_version # noqa: E501
Read the version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_version(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VersionInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_version_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.read_version_with_http_info(**kwargs) # noqa: E501
return data
def read_version_with_http_info(self, **kwargs): # noqa: E501
"""read_version # noqa: E501
Read the version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_version_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: VersionInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_version" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['OAuth2'] # noqa: E501
return self.api_client.call_api(
'/version', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VersionInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 34.305764
| 122
| 0.595923
| 1,537
| 13,688
| 5.035133
| 0.100195
| 0.058922
| 0.028944
| 0.037214
| 0.916139
| 0.913813
| 0.912392
| 0.902571
| 0.896369
| 0.891717
| 0
| 0.019487
| 0.313925
| 13,688
| 398
| 123
| 34.39196
| 0.8046
| 0.31955
| 0
| 0.8
| 0
| 0
| 0.145437
| 0.030964
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043902
| false
| 0
| 0.019512
| 0
| 0.126829
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d24860653e1cbd5bcb1cde53a2a1f80220751e50
| 208
|
py
|
Python
|
solution/v3/model/Response.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | 1
|
2020-01-01T22:28:00.000Z
|
2020-01-01T22:28:00.000Z
|
solution/v3/model/Response.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | null | null | null |
solution/v3/model/Response.py
|
mjastad/automation
|
f84742dc044954484679243fc51bd5eb2c660d44
|
[
"CC-BY-3.0"
] | 4
|
2018-01-23T15:09:04.000Z
|
2020-02-11T20:15:34.000Z
|
class Response:
def __init__(self, inst):
self.instance = inst
@property
def id(self):
return self.instance['status']
@property
def status(self):
return self.instance['status']
| 13.866667
| 36
| 0.644231
| 25
| 208
| 5.2
| 0.44
| 0.276923
| 0.215385
| 0.338462
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235577
| 208
| 14
| 37
| 14.857143
| 0.81761
| 0
| 0
| 0.444444
| 0
| 0
| 0.058252
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.222222
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
962e5521d0151e0893feff10b2daf9078ef1b6ec
| 154,160
|
py
|
Python
|
phyprof/plot_hpx_updates.py
|
rtohid/profiling
|
1fb3a15793dcea79cd28ccbf6dca31a083634f14
|
[
"BSL-1.0"
] | null | null | null |
phyprof/plot_hpx_updates.py
|
rtohid/profiling
|
1fb3a15793dcea79cd28ccbf6dca31a083634f14
|
[
"BSL-1.0"
] | null | null | null |
phyprof/plot_hpx_updates.py
|
rtohid/profiling
|
1fb3a15793dcea79cd28ccbf6dca31a083634f14
|
[
"BSL-1.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__license__ = """
Copyright (c) 2020 Shahrzad Shirzad
Copyright (c) 2020 R. Tohid
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
"""
import csv
import datetime
import glob
import math
import matplotlib.tri as mtri
import numpy as np
import random
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from mpl_toolkits import mplot3d
now = datetime.datetime.now()
date_str=now.strftime("%Y-%m-%d-%H%M")
simdsize=4
#hpx_dir='/home/shahrzad/repos/Blazemark/data/matrix/dmatdmatadd/01-04-2019-1027'
hpx_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/marvin/'
openmp_dir_2='/home/shahrzad/repos/Blazemark/data/openmp/04-27-2019/'
openmp_dir_1='/home/shahrzad/repos/Blazemark/data/openmp/all/'
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/matrix/06-13-2019/marvin'
def create_dict(directory):
thr=[]
repeats=[]
data_files=glob.glob(directory+'/*.dat')
benchmark=''
benchmarks=[]
chunk_sizes=[]
block_sizes=[]
for filename in data_files:
(repeat, benchmark, th, runtime, chunk_size, block_size_row, block_size_col) = filename.split('/')[-1].replace('.dat','').split('-')
if benchmark not in benchmarks:
benchmarks.append(benchmark)
if int(th) not in thr:
thr.append(int(th))
if int(repeat) not in repeats:
repeats.append(int(repeat))
if int(chunk_size) not in chunk_sizes:
chunk_sizes.append(int(chunk_size))
if block_size_row+'-'+block_size_col not in block_sizes:
block_sizes.append(block_size_row+'-'+block_size_col)
thr.sort()
benchmarks.sort()
repeats.sort()
chunk_sizes.sort()
block_sizes.sort()
mat_sizes={}
d_all={}
d={}
for benchmark in benchmarks:
d_all[benchmark]={}
d[benchmark]={}
for th in thr:
d_all[benchmark][th]={}
d[benchmark][th]={}
for r in repeats:
d_all[benchmark][th][r]={}
for bs in block_sizes:
d_all[benchmark][th][r][bs]={}
d[benchmark][th][bs]={}
for cs in chunk_sizes:
d_all[benchmark][th][r][bs][cs]={}
d[benchmark][th][bs][cs]={}
data_files.sort()
for filename in data_files:
stop=False
f=open(filename, 'r')
result=f.readlines()[3:]
benchmark=filename.split('/')[-1].split('-')[1]
th=int(filename.split('/')[-1].split('-')[2])
repeat=int(filename.split('/')[-1].split('-')[0])
chunk_size=int(filename.split('/')[-1].split('-')[4])
block_size=filename.split('/')[-1].split('-')[5]+'-'+filename.split('/')[-1].split('-')[6][0:-4]
size=[]
mflops=[]
for r in result:
if "N=" in r:
stop=True
if not stop:
size.append(int(r.strip().split(' ')[0]))
mflops.append(float(r.strip().split(' ')[-1]))
d_all[benchmark][th][repeat][block_size][chunk_size]['size']=size
d_all[benchmark][th][repeat][block_size][chunk_size]['mflops']=mflops
if 'size' not in d[benchmark][th][block_size][chunk_size].keys():
d[benchmark][th][block_size][chunk_size]['size']=size
d[benchmark][th][block_size][chunk_size]['mflops']=[0]*len(size)
if len(repeats)==1 and repeat==1:
d[benchmark][th][block_size][chunk_size]['mflops']=mflops
elif len(repeats)>1 and repeat!=1:
d[benchmark][th][block_size][chunk_size]['mflops']+=mflops/(len(repeats)-1)
else:
print("errrrrorrrrrrrrrrrr")
if benchmark not in mat_sizes.keys():
mat_sizes[benchmark]=size
return (d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)
#######################################################
def create_dict_relative(directory):
    """Build nested dicts of Blazemark results from the ``*.dat`` files in *directory*.

    Expected file-name layout (dash separated, ``.dat`` suffix)::

        <repeat>-<benchmark>-<threads>-<runtime>-<chunk_size>-<block_row>-<block_col>-<mat_size>.dat

    Each file holds three header lines followed by ``<size> ... <mflops>`` rows;
    parsing stops at the first row containing ``N=``.

    Returns:
        tuple ``(d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)`` where
        ``d[benchmark][threads][block_size][chunk_size]`` maps to
        ``{'size': [...], 'mflops': [...]}``.  When several repeats exist, the
        first repeat is treated as a warm-up run and the remaining repeats are
        averaged element-wise.
    """
    thr=[]
    repeats=[]
    data_files=glob.glob(directory+'/*.dat')
    benchmark=''
    benchmarks=[]
    chunk_sizes=[]
    block_sizes={}
    mat_sizes={}
    # First pass: discover the parameter axes encoded in the file names.
    for filename in data_files:
        # NOTE: '/' as path separator assumes a POSIX filesystem.
        (repeat, benchmark, th, runtime, chunk_size, block_size_row, block_size_col, mat_size) = filename.split('/')[-1].replace('.dat','').split('-')
        mat_size=mat_size.split(',')[0]
        if benchmark not in benchmarks:
            benchmarks.append(benchmark)
            mat_sizes[benchmark]=[]
            block_sizes[benchmark]=[]
        if int(mat_size) not in mat_sizes[benchmark]:
            mat_sizes[benchmark].append(int(mat_size))
        if int(th) not in thr:
            thr.append(int(th))
        if int(repeat) not in repeats:
            repeats.append(int(repeat))
        if block_size_row+'-'+block_size_col not in block_sizes[benchmark]:
            block_sizes[benchmark].append(block_size_row+'-'+block_size_col)
        if int(chunk_size) not in chunk_sizes:
            chunk_sizes.append(int(chunk_size))
    thr.sort()
    repeats.sort()
    chunk_sizes.sort()
    benchmarks.sort()
    # Pre-build the nested dictionaries (one zero-filled MFlops slot per matrix
    # size) so the fill pass below can index them freely.
    d_all={}
    d={}
    for benchmark in benchmarks:
        mat_sizes[benchmark].sort()
        block_sizes[benchmark].sort()
        d_all[benchmark]={}
        d[benchmark]={}
        for th in thr:
            d_all[benchmark][th]={}
            d[benchmark][th]={}
            for r in repeats:
                d_all[benchmark][th][r]={}
                for bs in block_sizes[benchmark]:
                    d_all[benchmark][th][r][bs]={}
                    d[benchmark][th][bs]={}
                    for cs in chunk_sizes:
                        d_all[benchmark][th][r][bs][cs]={}
                        d[benchmark][th][bs][cs]={}
                        d[benchmark][th][bs][cs]['size']=mat_sizes[benchmark]
                        d[benchmark][th][bs][cs]['mflops']=[0]*len(mat_sizes[benchmark])
                        d_all[benchmark][th][r][bs][cs]['size']=mat_sizes[benchmark]
                        d_all[benchmark][th][r][bs][cs]['mflops']=[0]*len(mat_sizes[benchmark])
    data_files.sort()
    # Second pass: parse every file and fill per-repeat results (d_all), then
    # fold them into the repeat-averaged dict (d).
    for filename in data_files:
        stop=False
        with open(filename, 'r') as f:  # fix: close the file handle (was leaked)
            result=f.readlines()[3:]   # first three lines are a header
        benchmark=filename.split('/')[-1].split('-')[1]
        th=int(filename.split('/')[-1].split('-')[2])
        repeat=int(filename.split('/')[-1].split('-')[0])
        chunk_size=int(filename.split('/')[-1].split('-')[4])
        block_size=filename.split('/')[-1].split('-')[5]+'-'+filename.split('/')[-1].split('-')[6].replace('.dat','')
        for r in result:
            if "N=" in r:
                stop=True
            if not stop:
                s=mat_sizes[benchmark].index(int(r.strip().split(' ')[0]))
                d_all[benchmark][th][repeat][block_size][chunk_size]['mflops'][s]=float(r.strip().split(' ')[-1])
        if len(repeats)==1 and repeat==1:
            d[benchmark][th][block_size][chunk_size]['mflops']=d_all[benchmark][th][repeat][block_size][chunk_size]['mflops']
        elif len(repeats)>1 and repeat!=1:
            # Average over all repeats except the warm-up repeat 1.
            # Bug fix: the original did `list += list/(len(repeats)-1)`, which
            # divides a list by an int (TypeError); even `+=` alone would
            # concatenate rather than add.  Accumulate element-wise instead.
            n=len(repeats)-1
            acc=d[benchmark][th][block_size][chunk_size]['mflops']
            vals=d_all[benchmark][th][repeat][block_size][chunk_size]['mflops']
            d[benchmark][th][block_size][chunk_size]['mflops']=[a+v/n for a, v in zip(acc, vals)]
        else:
            # Warm-up repeat of a multi-repeat run: intentionally discarded.
            print("errrrrorrrrrrrrrrrr")
    return (d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)
###########################################################################
def create_dict_relative_norepeat(directories):
    """Collect Blazemark results from several directories into one nested dict.

    File names must look like
    ``<node>-<benchmark>-<threads>-<runtime>-<chunk_size>-<block_row>-<block_col>-<mat_size>.dat``.
    An empty chunk-size field is recorded as chunk size ``-1``.

    Args:
        directories: list of directory paths to scan for ``*.dat`` files.

    Returns:
        tuple ``(d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)``;
        ``d[node][benchmark][threads][block_size][chunk_size]`` holds
        ``{'size': [...], 'mflops': [...]}`` with one slot per matrix size, and
        ``thr`` maps each node to its sorted list of thread counts.
    """
    thr={}
    data_files=[]
    # Gather every .dat file from all requested directories.
    for directory in directories:
        data_files.extend(glob.glob(directory+'/*.dat'))
    benchmark=''
    benchmarks=[]
    chunk_sizes=[]
    block_sizes={}
    mat_sizes={}
    nodes=[]
    # First pass: discover the parameter axes encoded in the file names.
    for filename in data_files:
        (node, benchmark, th, runtime, chunk_size, block_size_row, block_size_col, mat_size) = filename.split('/')[-1].replace('.dat','').split('-')
        mat_size=mat_size.split(',')[0]
        if benchmark not in benchmarks:
            benchmarks.append(benchmark)
            mat_sizes[benchmark]=[]
            block_sizes[benchmark]=[]
        if node not in nodes:
            thr[node]=[]
        if int(mat_size) not in mat_sizes[benchmark]:
            mat_sizes[benchmark].append(int(mat_size))
        if int(th) not in thr[node]:
            thr[node].append(int(th))
        if block_size_row+'-'+block_size_col not in block_sizes[benchmark]:
            block_sizes[benchmark].append(block_size_row+'-'+block_size_col)
        if chunk_size=='':
            chunk_size=-1
        if int(chunk_size) not in chunk_sizes:
            chunk_sizes.append(int(chunk_size))
        if node not in nodes:
            nodes.append(node)
    # Plain loop instead of a side-effect list comprehension.
    for node in thr.keys():
        thr[node].sort()
    nodes.sort()
    chunk_sizes.sort()
    benchmarks.sort()
    # Pre-build the nested dicts with zero-filled MFlops slots.
    d={}
    for node in nodes:
        d[node]={}
        for benchmark in benchmarks:
            mat_sizes[benchmark].sort()
            block_sizes[benchmark].sort()
            d[node][benchmark]={}
            for th in thr[node]:
                d[node][benchmark][th]={}
                for bs in block_sizes[benchmark]:
                    d[node][benchmark][th][bs]={}
                    for cs in chunk_sizes:
                        d[node][benchmark][th][bs][cs]={}
                        d[node][benchmark][th][bs][cs]['size']=mat_sizes[benchmark]
                        d[node][benchmark][th][bs][cs]['mflops']=[0]*len(mat_sizes[benchmark])
    data_files.sort()
    # Second pass: fill the MFlops slot addressed by each data row.
    for filename in data_files:
        stop=False
        with open(filename, 'r') as f:  # fix: close the file handle (was leaked)
            result=f.readlines()[3:]   # first three lines are a header
        (node, benchmark, th, runtime, chunk_size, block_size_row, block_size_col, mat_size) = filename.split('/')[-1].replace('.dat','').split('-')
        th=int(th)
        if chunk_size=='':
            chunk_size=-1
        chunk_size=int(chunk_size)
        for r in result:
            if "N=" in r:
                stop=True
            if not stop:
                s=mat_sizes[benchmark].index(int(r.strip().split(' ')[0]))
                d[node][benchmark][th][block_size_row+'-'+block_size_col][chunk_size]['mflops'][s]=float(r.strip().split(' ')[-1])
    return (d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)
###########################################################################
def create_dict_relative_norepeat_counters_onebyone(directory):
    """Parse Blazemark ``.dat`` files that embed HPX performance-counter output.

    One matrix size per file; file names look like
    ``<node>-<benchmark>-<threads>-<runtime>-<chunk>-<block_row>-<block_col>-<mat_size>.dat``.
    Besides the MFlops value, each file contains several repetitions of
    counter dumps (idle-rate, overheads, PAPI L2 cache counters) separated by
    the word ``Done``.

    Returns:
        tuple ``(d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)``;
        ``d[node][benchmark][th][block_size][chunk_size]`` holds ``'size'``,
        ``'mflops'`` and a per-matrix-size ``'counters'`` entry with the
        individual repetitions (``'ind'``) and their per-thread averages
        (``'avg'``).
    """
    thr=[]
    data_files=glob.glob(directory+'/*.dat')
    benchmark=''
    benchmarks=[]
    chunk_sizes=[]
    block_sizes={}
    mat_sizes={}
    nodes=[]
    # First pass: discover the parameter axes encoded in the file names.
    for filename in data_files:
        (node, benchmark, th, runtime, chunk_size, block_size_row, block_size_col, mat_size) = filename.split('/')[-1].replace('.dat','').split('-')
        mat_size=mat_size.split(',')[0]
        if benchmark not in benchmarks:
            benchmarks.append(benchmark)
            mat_sizes[benchmark]=[]
            block_sizes[benchmark]=[]
        if int(mat_size) not in mat_sizes[benchmark]:
            mat_sizes[benchmark].append(int(mat_size))
        if int(th) not in thr:
            thr.append(int(th))
        if block_size_row+'-'+block_size_col not in block_sizes[benchmark]:
            block_sizes[benchmark].append(block_size_row+'-'+block_size_col)
        if int(chunk_size) not in chunk_sizes:
            chunk_sizes.append(int(chunk_size))
        if node not in nodes:
            nodes.append(node)
    thr.sort()
    nodes.sort()
    chunk_sizes.sort()
    benchmarks.sort()
    # Fixed repetition count used for averaging the counters below.
    # NOTE(review): assumes every file contains exactly 5 usable repetitions —
    # the slice reps[1:-1] below is not checked against this; verify.
    repeats=5
    d={}
    # Pre-build the nested dicts; 'counters' gets one placeholder slot per
    # matrix size which is replaced by a dict when the file is parsed.
    for node in nodes:
        d[node]={}
        for benchmark in benchmarks:
            mat_sizes[benchmark].sort()
            block_sizes[benchmark].sort()
            d[node][benchmark]={}
            for th in thr:
                d[node][benchmark][th]={}
                for bs in block_sizes[benchmark]:
                    d[node][benchmark][th][bs]={}
                    for cs in chunk_sizes:
                        d[node][benchmark][th][bs][cs]={}
                        d[node][benchmark][th][bs][cs]['size']=mat_sizes[benchmark]
                        d[node][benchmark][th][bs][cs]['mflops']=[0]*len(mat_sizes[benchmark])
                        d[node][benchmark][th][bs][cs]['counters']=[0]*len(mat_sizes[benchmark])
    data_files.sort()
    # Second pass: parse each file's MFlops value and counter dumps.
    for filename in data_files:
        # NOTE(review): the file handle is never closed (resource leak).
        f=open(filename, 'r')
        results=f.read()
        (node, benchmark, th, runtime, chunk_size, block_size_row, block_size_col, mat_size) = filename.split('/')[-1].replace('.dat','').split('-')
        th=int(th)
        cs=int(chunk_size)
        # Per-thread accumulators, summed over repetitions then divided by
        # `repeats` at the end.
        counters_avg={'idle_rate':[0]*th, 'average_time':[0]*th, 'cumulative_overhead_time':[0]*th, 'cumulative_count':[0]*th, 'average_overhead_time':[0]*th, 'papi_tca':[0]*th, 'papi_tcm':[0]*th}
        s=mat_sizes[benchmark].index(int(mat_size))
        bs=block_size_row+'-'+block_size_col
        # The MFlops value is the rest of the line that contains " <mat_size> ".
        mflops=float((results.split(' '+mat_size+' ')[1].split('\n')[0]).strip())
        d[node][benchmark][th][bs][cs]['mflops'][s]=mflops
        s=mat_sizes[benchmark].index(int(mat_size))
        d[node][benchmark][th][bs][cs]['counters'][s]={}
        d[node][benchmark][th][bs][cs]['counters'][s]['ind']=[]
        d[node][benchmark][th][bs][cs]['counters'][s]['avg']={}
        # Counter dumps are separated by 'Done'; the first and last chunks of
        # the slice are skipped (warm-up / trailing output).
        reps=results.split('Done')[1:]
        for rep in reps[1:-1]:
            counters_ind={'idle_rate':[0]*th, 'average_time':[0]*th, 'cumulative_overhead_time':[0]*th, 'cumulative_count':[0]*th, 'average_overhead_time':[0]*th,'papi_tca':[0]*th, 'papi_tcm':[0]*th}
            rep_lines=rep.split('Initialization')[0].split('\n')
            for r in rep_lines:
                # Each counter line carries the worker-thread id as
                # '...thread#<n>}...'; values are the trailing CSV fields.
                if 'idle-rate' in r and 'pool' in r:
                    idle_rate=float(r.strip().split(',')[-2])/100
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['idle_rate'][th_num]=idle_rate
                    counters_avg['idle_rate'][th_num]+=idle_rate
                elif 'cumulative-overhead' in r and 'pool' in r:
                    cumulative_overhead=float(r.strip().split(',')[-2])/1000
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['cumulative_overhead_time'][th_num]=cumulative_overhead
                    counters_avg['cumulative_overhead_time'][th_num]+=cumulative_overhead
                elif 'average-overhead' in r and 'pool' in r:
                    average_overhead=float(r.strip().split(',')[-2])/1000
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['average_overhead_time'][th_num]=average_overhead
                    counters_avg['average_overhead_time'][th_num]+=average_overhead
                elif 'average,' in r and 'pool' in r:
                    average_time=float(r.strip().split(',')[-2])/1000
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['average_time'][th_num]=average_time
                    counters_avg['average_time'][th_num]+=average_time
                elif 'cumulative,' in r and 'pool' in r:
                    cumulative=float(r.strip().split(',')[-1])
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['cumulative_count'][th_num]=cumulative
                    counters_avg['cumulative_count'][th_num]+=cumulative
                elif 'PAPI_L2_TCA' in r :
                    papi_tca=float(r.strip().split(',')[-1])
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['papi_tca'][th_num]=papi_tca
                    counters_avg['papi_tca'][th_num]+=papi_tca
                elif 'PAPI_L2_TCM' in r :
                    # NOTE(review): variable name papi_tca is reused here for
                    # the TCM value; stored under 'papi_tcm', so the result is
                    # still correct.
                    papi_tca=float(r.strip().split(',')[-1])
                    th_num=int(r.strip().split('thread#')[1].split('}')[0])
                    counters_ind['papi_tcm'][th_num]=papi_tca
                    counters_avg['papi_tcm'][th_num]+=papi_tca
            d[node][benchmark][th][bs][cs]['counters'][s]['ind'].append(counters_ind)
        # Divide accumulated sums by the fixed repetition count.
        for counter in counters_avg.keys():
            counters_avg[counter]=[counters_avg[counter][thread]/repeats for thread in range(th)]
        d[node][benchmark][th][bs][cs]['counters'][s]['avg']=counters_avg
    return (d, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)
#############################################################################
def create_dict_openmp(directory):
    """Parse OpenMP Blazemark runs (``<repeat>-<benchmark>-<threads>-<runtime>.dat``).

    Each file holds three header lines followed by ``<size> ... <mflops>``
    rows; parsing stops at the first row containing ``N=``.  Repeat 1 is
    treated as a warm-up: with multiple repeats only repeats > 1 are averaged.

    Returns:
        ``d[benchmark][threads]`` -> ``{'size': [...], 'mflops': [...]}``.
    """
    thr=[]
    repeats=[]
    data_files=glob.glob(directory+'/*.dat')
    benchmark=''
    benchmarks=[]
    # First pass: discover benchmarks, thread counts and repeats from names.
    for filename in data_files:
        (repeat, benchmark, th, runtime) = filename.split('/')[-1].replace('.dat','').split('-')
        if benchmark not in benchmarks:
            benchmarks.append(benchmark)
        if int(th) not in thr:
            thr.append(int(th))
        if int(repeat) not in repeats:
            repeats.append(int(repeat))
    thr.sort()
    benchmarks.sort()
    repeats.sort()
    d_all={}
    d={}
    for benchmark in benchmarks:
        d_all[benchmark]={}
        d[benchmark]={}
        for th in thr:
            d_all[benchmark][th]={}
            d[benchmark][th]={}
            for r in repeats:
                d_all[benchmark][th][r]={}
    data_files.sort()
    # Second pass: parse the per-repeat size/MFlops columns.
    for filename in data_files:
        stop=False
        with open(filename, 'r') as f:  # fix: close the file handle (was leaked)
            result=f.readlines()[3:]   # first three lines are a header
        benchmark=filename.split('/')[-1].split('-')[1]
        th=int(filename.split('/')[-1].split('-')[2])
        repeat=int(filename.split('/')[-1].split('-')[0])
        size=[]
        mflops=[]
        for r in result:
            if "N=" in r:
                stop=True
            if not stop:
                size.append(int(r.strip().split(' ')[0]))
                mflops.append(float(r.strip().split(' ')[-1]))
        d_all[benchmark][th][repeat]['size']=size
        d_all[benchmark][th][repeat]['mflops']=mflops
    # Fold the per-repeat results into the averaged dict.
    for benchmark in benchmarks:
        for th in thr:
            # assumes repeat 1 exists for every (benchmark, th) — TODO confirm
            d[benchmark][th]['size']=d_all[benchmark][th][1]['size']
            mflops=[0]*len(d[benchmark][th]['size'])
            if max(repeats)==1:
                if 'mflops' in d_all[benchmark][th][1].keys():
                    mflops=d_all[benchmark][th][repeats[0]]['mflops']
                d[benchmark][th]['mflops']=mflops
            else:
                # Skip repeat 1 (warm-up) and average the rest element-wise.
                for r in repeats[1:]:
                    mflops=[mflops[i]+d_all[benchmark][th][r]['mflops'][i] for i in range(len(mflops))]
                d[benchmark][th]['mflops']=[x/float(max(repeats)-1) for x in mflops]
    return d
def create_dict_reference(directory,runtime='openmp'):
    """Parse reference-run ``.dat`` files (``<node>-<benchmark>-<threads>[-<ref>].dat``).

    Only files whose name contains *runtime* are considered.  Thread counts
    10 and 11 are skipped (the data set contains no usable measurements for
    them).  Rows containing ``N=`` or ``/`` end the data section of a file.

    Returns:
        ``d_all[node][benchmark][threads]`` -> ``{'size': [...], 'mflops': [...]}``.
    """
    thr=[]
    nodes=[]
    # Keep only the files produced with the requested runtime
    # (plain loop instead of a side-effect list comprehension).
    data_files=[]
    for i in glob.glob(directory+'/*.dat'):
        if runtime in i.split('/')[-1]:
            data_files.append(i)
    benchmark=''
    benchmarks=[]
    # First pass: discover nodes, benchmarks and thread counts.
    for filename in data_files:
        try:
            (node, benchmark, th) = filename.split('/')[-1].replace('.dat','').split('-')
        except ValueError:  # fix: narrow the bare except; 4 fields => trailing ref tag
            (node, benchmark, th, ref) = filename.split('/')[-1].replace('.dat','').split('-')
        if benchmark not in benchmarks:
            benchmarks.append(benchmark)
        if int(th) not in thr:
            thr.append(int(th))
        if node not in nodes:
            nodes.append(node)
    thr.sort()
    benchmarks.sort()
    nodes.sort()
    d_all={}
    for node in nodes:
        d_all[node]={}
        for benchmark in benchmarks:
            d_all[node][benchmark]={}
            for th in thr:
                d_all[node][benchmark][th]={}
    data_files.sort()
    # Second pass: parse the size/MFlops columns of each file.
    for filename in data_files:
        stop=False
        with open(filename, 'r') as f:  # fix: close the file handle (was leaked)
            result=f.readlines()[3:]   # first three lines are a header
        try:
            (node, benchmark, th) = filename.split('/')[-1].replace('.dat','').split('-')
        except ValueError:
            (node, benchmark, th, ref) = filename.split('/')[-1].replace('.dat','').split('-')
        th = int(th)
        if th not in [10,11]:
            size=[]
            mflops=[]
            for r in result:
                if "N=" in r or '/' in r:
                    stop=True
                if not stop:
                    size.append(int(r.strip().split(' ')[0]))
                    mflops.append(float(r.strip().split(' ')[-1]))
            d_all[node][benchmark][th]['size']=size
            d_all[node][benchmark][th]['mflops']=mflops
    return d_all
########################################################################################
# ---- Data loading (Spyder-style cells; paths are machine-specific) ----
hpx_dir_ref='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/reference_hpx/marvin/master/'
d_hpx_ref=create_dict(hpx_dir_ref)  # NOTE(review): create_dict is defined elsewhere in this file
hpx_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/marvin/'
hpx_dir1='/home/shahrzad/repos/Blazemark/data/matrix/09-15-2019/'
hpx_dir2='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/trillian/'
hpx_dir3='/home/shahrzad/repos/Blazemark/results/new_threads/'
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([hpx_dir,hpx_dir1,hpx_dir2])
hpxmp_dir='/home/shahrzad/repos/Blazemark/data/hpxmp/CCGRID20-Nov/'
d_hpxmp=create_dict_reference(hpxmp_dir,'hpx')
d_openmp=create_dict_reference(hpxmp_dir)
openmp_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/openmp/'
# NOTE(review): d_openmp is re-assigned twice below; only the last value survives.
(d_openmp, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([openmp_dir])
d_openmp=create_dict_openmp(openmp_dir_1)  # NOTE(review): openmp_dir_1 is not defined in this chunk — verify
i=1
# Quick-look plot: MFlops vs. matrix size for block sizes starting with '4-'.
# NOTE(review): `node` is only assigned further down in this script — running
# this cell first raises NameError; verify cell execution order.
for benchmark in benchmarks:
    for th in thr:
        plt.figure(i)
        for a in ['4']:
            for b in d_hpx[node][benchmark][th].keys():# block_sizes:
                if b.startswith(a+'-'):
                    for c in d_hpx[node][benchmark][th][b].keys():#chunk_sizes:
                        # Only plot curves with fewer than 50% missing (zero) samples.
                        if d_hpx[node][benchmark][th][b][c]['mflops'].count(0)<0.5*len(d_hpx[node][benchmark][th][b][c]['mflops']):
                            plt.plot(d_hpx[node][benchmark][th][b][c]['size'], d_hpx[node][benchmark][th][b][c]['mflops'],label='chunk_size: '+str(c)+' block_size: '+str(b)+ ' '+str(th)+' threads',marker='*')
#                plt.plot(d_openmp[benchmark][th]['size'], d_openmp[benchmark][th]['mflops'],label='openmp '+str(th)+' threads')
        plt.xlabel("# matrix size")
        plt.ylabel('MFlops')
        plt.xscale('log')
        plt.grid(True, 'both')
        plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        plt.title('hpx '+benchmark)
        i=i+1
        plt.figure(i)
#        plt.savefig(pp, format='pdf',bbox_inches='tight')
        print('')
#plt.show()
#pp.close()
#f=open('/home/shahrzad/repos/Blazemark/data/data.csv','w')
#f_writer=csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
#f_writer.writerow(['benchmark','matrix_size','num_threads','block_size_row','block_size_col','chunk_size','num_blocks','mflops'])
#for benchmark in d_hpx.keys():
# for th in d_hpx[benchmark].keys():
# for block_size in d_hpx[benchmark][th].keys():
# for chunk_size in d_hpx[benchmark][th][block_size].keys():
# if len(d_hpx[benchmark][th][block_size][chunk_size])!=0:
# for j in range(len(d_hpx[benchmark][th][block_size][chunk_size]['size'])):
# m=d_hpx[benchmark][th][block_size][chunk_size]['size'][j]
# b_r=int(b.split('-')[0])
# b_c=int(b.split('-')[1])
# rest1=b_r%simdsize
# rest2=b_c%simdsize
# if b_r>m:
# b_r=m
# if b_c>m:
# b_c=m
# if b_c%simdsize!=0:
# b_c=b_c+simdsize-b_c%simdsize
# equalshare1=math.ceil(m/b_r)
# equalshare2=math.ceil(m/b_c)
# for i in range(len(d_hpx[benchmark][th][block_size][chunk_size]['size'])):
# f_writer.writerow([benchmark,str(m),str(th),str(b_r), str(b_c),str(chunk_size),str(equalshare1*equalshare2), str(d_hpx[benchmark][th][block_size][chunk_size]['mflops'][i])])
#f.close()
#
# Re-load alternative data sets (each call overwrites d_hpx and the axis lists).
(d_hpx_old, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes_2)=create_dict('/home/shahrzad/repos/Blazemark/results/previous')
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict(hpx_dir)
hpx_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/marvin'
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative(hpx_dir)
papi_directory='/home/shahrzad/repos/Blazemark/data/matrix/08-07-2019/performance_counters/'
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat_counters_onebyone(papi_directory)
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/matrix/08-07-2019/performance_counters'
###########################################################################
#performnce counters plots
#############################################################################
# Default selection for the counter plots below.
i=1
th=4
m=912
benchmark='dmatdmatadd'
node='marvin'
#plot number of cache misses based on chunk size for a matrix size
# NOTE(review): the savefig at the bottom needs an open PdfPages object `pp`,
# but the PdfPages line here is commented out — verify before running.
for benchmark in benchmarks:
    for th in d_hpx[node][benchmark].keys():
#        pp = PdfPages(perf_directory+'/bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
        for m in mat_sizes[benchmark]:
            plt.figure(i)
            for b in d_hpx[node][benchmark][th].keys():
                results=[]
                l2_cm=[]
                l2_ch=[]
                l2_miss_rate=[]
                chunk_sizes=[]
                grain_sizes=[]
                for c in d_hpx[node][benchmark][th][b].keys():
                    k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                    if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                        # Re-derive the block partitioning of the m x m matrix
                        # to compute the grain size for chunk size c.
                        b_r=int(b.split('-')[0])
                        b_c=int(b.split('-')[1])
                        rest1=b_r%simdsize
                        rest2=b_c%simdsize
                        if b_r>m:
                            b_r=m
                        if b_c>m:
                            b_c=m
                        if b_c%simdsize!=0:
                            b_c=b_c+simdsize-b_c%simdsize
                        equalshare1=math.ceil(m/b_r)
                        equalshare2=math.ceil(m/b_c)
                        chunk_sizes.append(c)
                        num_blocks=equalshare1*equalshare2
                        num_elements_uncomplete=0
                        if b_c<m:
                            num_elements_uncomplete=(m%b_c)*b_r
                        # Per-block work estimate (flop count per block).
                        mflop=0
                        if benchmark=='dmatdmatadd':
                            mflop=b_r*b_c
                        elif benchmark=='dmatdmatdmatadd':
                            mflop=b_r*b_c*2
                        else:
                            mflop=b_r*b_c*(2*m)
                        num_elements=[mflop]*num_blocks
                        if num_elements_uncomplete:
                            for j in range(1,equalshare1+1):
                                num_elements[j*equalshare2-1]=num_elements_uncomplete
                        data_type=8
                        # Grain size = work contained in one chunk of c blocks.
                        grain_size=sum(num_elements[0:c])
                        num_mat=3
                        if benchmark=='dmatdmatdmatadd':
                            num_mat=4
                        cost=c*mflop*num_mat/data_type
                        grain_sizes.append(grain_size)
                        results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                        l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                        l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
                        l2_miss_rate.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)])
                if len(chunk_sizes)!=0:
                    # One trace per core; assumes at least 4 worker threads — TODO confirm.
                    t0=[l[0] for l in l2_miss_rate]
                    t1=[l[1] for l in l2_miss_rate]
                    t2=[l[2] for l in l2_miss_rate]
                    t3=[l[3] for l in l2_miss_rate]
                    plt.figure(i)
                    plt.plot(chunk_sizes, t0, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b)+' core 0')
                    plt.ylabel('l2_cache_misse rate')
                    plt.xscale('log')
                    plt.title(benchmark)
                    plt.grid(True, 'both')
                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
                    plt.figure(i+1)
                    plt.plot(chunk_sizes, t1, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 1')
                    plt.ylabel('l2_cache_misse rate')
                    plt.xscale('log')
                    plt.title(benchmark)
                    plt.grid(True, 'both')
                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
                    plt.figure(i+2)
                    plt.plot(chunk_sizes, t2, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 2')
                    plt.ylabel('l2_cache_misse rate')
                    plt.xscale('log')
                    plt.title(benchmark)
                    plt.grid(True, 'both')
                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
                    plt.figure(i+3)
                    plt.plot(chunk_sizes, t3, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 3')
                    plt.ylabel('l2_cache_misse rate')
                    plt.xscale('log')
                    plt.title(benchmark)
                    plt.grid(True, 'both')
                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
#                    t0=[l[0] for l in l2_ch]
#                    t1=[l[1] for l in l2_ch]
#                    t2=[l[2] for l in l2_ch]
#                    t3=[l[3] for l in l2_ch]
#
#                    plt.plot(chunk_sizes, t0, label='0')
#                    plt.plot(chunk_sizes, t1, label='1')
#                    plt.plot(chunk_sizes, t2, label='2')
#                    plt.plot(chunk_sizes, t3, label='3')
#                    plt.ylabel('number of cache_hits')
#                    plt.xscale('log')
#                    plt.title(benchmark)
#                    plt.grid(True, 'both')
#                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
#                plt.plot(chunk_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
#                plt.plot(grain_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
#                plt.xlabel("grain_size(flop)")
#
##                plt.xlabel("chunk_size")
#                plt.ylabel('MFlops')
#                plt.xscale('log')
#                plt.title(benchmark)
#                plt.grid(True, 'both')
#                plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            print('')
            plt.savefig(pp, format='pdf',bbox_inches='tight')
            i=i+1
#plot number of cache misses based on chunk size for a matrix size
# Same derivation as the loop above, but one figure per (matrix size, thread
# count) with all block sizes pooled; core 0 is scattered over grain size.
for benchmark in benchmarks:
#    pp = PdfPages(perf_directory+'/bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
    for m in mat_sizes[benchmark]:
        for th in d_hpx[node][benchmark].keys():
            plt.figure(i)
            results=[]
            l2_cm=[]
            l2_ch=[]
            l2_miss_rate=[]
            chunk_sizes=[]
            grain_sizes=[]
            block_sizes=[]
            for b in d_hpx[node][benchmark][th].keys():
                for c in d_hpx[node][benchmark][th][b].keys():
                    k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                    if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                        # Re-derive the block partitioning to get the grain size.
                        b_r=int(b.split('-')[0])
                        b_c=int(b.split('-')[1])
                        rest1=b_r%simdsize
                        rest2=b_c%simdsize
                        if b_r>m:
                            b_r=m
                        if b_c>m:
                            b_c=m
                        if b_c%simdsize!=0:
                            b_c=b_c+simdsize-b_c%simdsize
                        equalshare1=math.ceil(m/b_r)
                        equalshare2=math.ceil(m/b_c)
                        chunk_sizes.append(c)
                        num_blocks=equalshare1*equalshare2
                        num_elements_uncomplete=0
                        if b_c<m:
                            num_elements_uncomplete=(m%b_c)*b_r
                        mflop=0
                        if benchmark=='dmatdmatadd':
                            mflop=b_r*b_c
                        elif benchmark=='dmatdmatdmatadd':
                            mflop=b_r*b_c*2
                        else:
                            mflop=b_r*b_c*(2*m)
                        num_elements=[mflop]*num_blocks
                        if num_elements_uncomplete:
                            for j in range(1,equalshare1+1):
                                num_elements[j*equalshare2-1]=num_elements_uncomplete
                        data_type=8
                        grain_size=sum(num_elements[0:c])
                        num_mat=3
                        if benchmark=='dmatdmatdmatadd':
                            num_mat=4
                        cost=c*mflop*num_mat/data_type
                        grain_sizes.append(grain_size)
                        results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                        l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                        l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(int(th))])
                        l2_miss_rate.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(int(th))])
                        block_sizes.append(b)
            # Per-core miss-rate traces; assumes at least 4 worker threads — TODO confirm.
            t0=[l[0] for l in l2_miss_rate]
            t1=[l[1] for l in l2_miss_rate]
            t2=[l[2] for l in l2_miss_rate]
            t3=[l[3] for l in l2_miss_rate]
            plt.figure(i)
            plt.axes([0, 0, 2, 1])
            plt.scatter(grain_sizes, t0, label=str(th)+' threads matrix_size:'+str(m)+' core 0')
            plt.ylabel('l2_cache_misse rate')
            plt.xlabel('Grain size')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            plt.figure(i+1)
            plt.plot(chunk_sizes, t1, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 1')
            plt.ylabel('l2_cache_misse rate')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            plt.figure(i+2)
            plt.plot(chunk_sizes, t2, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 2')
            plt.ylabel('l2_cache_misse rate')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            plt.figure(i+3)
            plt.plot(chunk_sizes, t3, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 3')
            plt.ylabel('l2_cache_misse rate')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            print('')
            plt.savefig(pp, format='pdf',bbox_inches='tight')
            i=i+1
#plot number of cache misses based on chunk size/grain size for a matrix size
# Average L2 miss rate over all threads, scattered against grain size; one PDF
# per (benchmark, matrix size).
for benchmark in benchmarks:
    for m in mat_sizes[benchmark]:
        pp = PdfPages(perf_directory+'/cache_miss_'+node+'_'+benchmark+'_'+str(int(m))+'.pdf')
        for th in d_hpx[node][benchmark].keys():
            results=[]
            l2_cm=[]
            l2_ch=[]
            l2_miss_rate=[]
            chunk_sizes=[]
            grain_sizes=[]
            block_sizes=[]
            for b in d_hpx[node][benchmark][th].keys():
                for c in d_hpx[node][benchmark][th][b].keys():
                    k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                    if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                        # Re-derive the block partitioning to get the grain size.
                        b_r=int(b.split('-')[0])
                        b_c=int(b.split('-')[1])
                        rest1=b_r%simdsize
                        rest2=b_c%simdsize
                        if b_r>m:
                            b_r=m
                        if b_c>m:
                            b_c=m
                        if b_c%simdsize!=0:
                            b_c=b_c+simdsize-b_c%simdsize
                        equalshare1=math.ceil(m/b_r)
                        equalshare2=math.ceil(m/b_c)
                        chunk_sizes.append(c)
                        num_blocks=equalshare1*equalshare2
                        num_elements_uncomplete=0
                        if b_c<m:
                            num_elements_uncomplete=(m%b_c)*b_r
                        mflop=0
                        if benchmark=='dmatdmatadd':
                            mflop=b_r*b_c
                        elif benchmark=='dmatdmatdmatadd':
                            mflop=b_r*b_c*2
                        else:
                            mflop=b_r*b_c*(2*m)
                        num_elements=[mflop]*num_blocks
                        if num_elements_uncomplete:
                            for j in range(1,equalshare1+1):
                                num_elements[j*equalshare2-1]=num_elements_uncomplete
                        data_type=8
                        grain_size=sum(num_elements[0:c])
                        num_mat=3
                        if benchmark=='dmatdmatdmatadd':
                            num_mat=4
                        cost=c*mflop*num_mat/data_type
                        grain_sizes.append(grain_size)
                        results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                        l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                        l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(int(th))])
                        l2_miss_rate.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(int(th))])
                        block_sizes.append(b)
#            t0=[l[0] for l in l2_miss_rate]
#            t1=[l[1] for l in l2_miss_rate]
#            t2=[l[2] for l in l2_miss_rate]
#            t3=[l[3] for l in l2_miss_rate]
            plt.figure(i)
#            plt.axes([0, 0, 2, 1])
#            plt.scatter(grain_sizes, t0, label=str(th)+' threads matrix_size:'+str(m)+' core 0')
#            plt.ylabel('l2_cache_misse rate')
#            plt.xlabel('Grain size')
#            plt.xscale('log')
#            plt.title(benchmark)
#            plt.grid(True, 'both')
#            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
#            plt.figure(i+1)
            plt.axes([0, 0, 2, 1])
            # Average miss rate over the th threads for each sample.
            avg_l2=[sum(l)/th for l in l2_miss_rate]
            plt.scatter(grain_sizes, [sum(l)/th for l in l2_miss_rate], label=str(th)+' threads matrix_size:'+str(m)+' core 0')
            plt.ylabel('average l2_cache_misse rate')
            plt.xlabel('Grain size')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            for e in range(len(l2_miss_rate)):
                plt.annotate(block_sizes[e], # this is the text
                             (grain_sizes[e],avg_l2[e]), # this is the point to label
                             textcoords="offset points", # how to position the text
                             xytext=(0,10), # distance from text to points (x,y)
                             ha='center') # horizontal alignment can be left, right or center
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            plt.savefig(pp, format='pdf',bbox_inches='tight')
            i=i+1
        plt.show()
        pp.close()
        # NOTE(review): the code below is dead/leftover — t1..t3 are only
        # computed in the commented-out lines above (NameError if executed),
        # and `pp` is already closed before the final savefig.  Verify intent.
        plt.figure(i+1)
        plt.plot(chunk_sizes, t1, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 1')
        plt.ylabel('l2_cache_misse rate')
        plt.xscale('log')
        plt.title(benchmark)
        plt.grid(True, 'both')
        plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        plt.figure(i+2)
        plt.plot(chunk_sizes, t2, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 2')
        plt.ylabel('l2_cache_misse rate')
        plt.xscale('log')
        plt.title(benchmark)
        plt.grid(True, 'both')
        plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        plt.figure(i+3)
        plt.plot(chunk_sizes, t3, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b_r)+'-'+str(b_c)+' core 3')
        plt.ylabel('l2_cache_misse rate')
        plt.xscale('log')
        plt.title(benchmark)
        plt.grid(True, 'both')
        plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        print('')
        plt.savefig(pp, format='pdf',bbox_inches='tight')
        i=i+1
###################################################################
#plot idle rate based on grain size for a matrix size
# Scatter of the per-sample mean HPX idle rate against grain size; one PDF per
# (benchmark, matrix size), one page per thread count.
for benchmark in benchmarks:
    for m in mat_sizes[benchmark]:
        pp = PdfPages(perf_directory+'/idle_rate_'+node+'_'+benchmark+'_'+str(int(m))+'.pdf')
        for th in d_hpx[node][benchmark].keys():
            plt.figure(i)
            results=[]
            idle_rates=[]
            chunk_sizes=[]
            grain_sizes=[]
            block_sizes=[]
            for b in d_hpx[node][benchmark][th].keys():
                for c in d_hpx[node][benchmark][th][b].keys():
                    k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                    if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                        # Re-derive the block partitioning of the m x m matrix
                        # to compute the grain size for chunk size c.
                        b_r=int(b.split('-')[0])
                        b_c=int(b.split('-')[1])
                        rest1=b_r%simdsize
                        rest2=b_c%simdsize
                        if b_r>m:
                            b_r=m
                        if b_c>m:
                            b_c=m
                        if b_c%simdsize!=0:
                            b_c=b_c+simdsize-b_c%simdsize
                        equalshare1=math.ceil(m/b_r)
                        equalshare2=math.ceil(m/b_c)
                        chunk_sizes.append(c)
                        num_blocks=equalshare1*equalshare2
                        num_elements_uncomplete=0
                        if b_c<m:
                            num_elements_uncomplete=(m%b_c)*b_r
                        mflop=0
                        if benchmark=='dmatdmatadd':
                            mflop=b_r*b_c
                        elif benchmark=='dmatdmatdmatadd':
                            mflop=b_r*b_c*2
                        else:
                            mflop=b_r*b_c*(2*m)
                        num_elements=[mflop]*num_blocks
                        if num_elements_uncomplete:
                            for j in range(1,equalshare1+1):
                                num_elements[j*equalshare2-1]=num_elements_uncomplete
                        data_type=8
                        grain_size=sum(num_elements[0:c])
                        num_mat=3
                        if benchmark=='dmatdmatdmatadd':
                            num_mat=4
                        cost=c*mflop*num_mat/data_type
                        grain_sizes.append(grain_size)
                        results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                        idle_rates.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['idle_rate'])
                        block_sizes.append(b)
            # Mean idle rate across the th worker threads for each sample.
            i0=[sum(idl)/th for idl in idle_rates]
            plt.axes([0, 0, 2, 1])
            plt.scatter(grain_sizes, i0, label=str(th)+' threads matrix_size:'+str(m)+' core 0')
            plt.ylabel('idle rate')
            plt.xlabel('Grain size')
            plt.xscale('log')
            plt.title(benchmark)
            plt.grid(True, 'both')
            for e in range(len(idle_rates)):
                # Bug fix: annotate point e with ITS block size — the original
                # indexed block_sizes with the global figure counter `i`
                # (cf. the cache-miss plot above, which correctly uses `e`).
                plt.annotate(block_sizes[e], # this is the text
                             (grain_sizes[e],i0[e]), # this is the point to label
                             textcoords="offset points", # how to position the text
                             xytext=(0,3), # distance from text to points (x,y)
                             ha='center') # horizontal alignment can be left, right or center
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            plt.savefig(pp, format='pdf',bbox_inches='tight')
            i=i+1
        plt.show()
        pp.close()
# Reload the benchmark results together with per-core PAPI counters.
# NOTE(review): create_dict_relative_norepeat_counters_onebyone is defined
# elsewhere in this file; presumably it parses the result files under
# papi_directory -- confirm against its definition.
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat_counters_onebyone(papi_directory)
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/matrix/08-07-2019/performance_counters'
################################################################
#3d plot
################################################################
# Throw-away exploration of matplotlib's 3-d API: a helix line plus the
# measured data.  xdata/ydata/zdata are computed but never plotted; the
# scatter3D below uses grain_sizes/block_sizes/results from the previous
# section.
fig = plt.figure()
ax = plt.axes(projection='3d')
# Data for a three-dimensional line
zline = np.linspace(0, 15, 1000)
xline = np.sin(zline)
yline = np.cos(zline)
ax.plot3D(xline, yline, zline, 'gray')
# Data for three-dimensional scattered points
zdata = 15 * np.random.random(100)
xdata = np.sin(zdata) + 0.1 * np.random.randn(100)
ydata = np.cos(zdata) + 0.1 * np.random.randn(100)
ax.scatter3D(grain_sizes, block_sizes, results, c=results, cmap='Greens');
# 0 -> one static view per figure; 1 -> dump one frame per 10-degree azimuth
# step so the frames can be stitched into an animated gif afterwards.
animation=0
#mflops based on block size and grain size
# For each node/benchmark: one PDF of 3-d surfaces showing throughput
# (Mflops) as a function of grain size and block size, one surface per
# (matrix size m, thread count th).  Uses the globals `simdsize`, the
# running figure counter `i`, and the parsed-results dict `d_hpx`.
for node in ['marvin', 'trillian']:
    for benchmark in benchmarks:
        pp = PdfPages(perf_directory+'/3d_plot_'+benchmark+'_'+node+'.pdf')
        for m in mat_sizes[benchmark]:
            for th in d_hpx[node][benchmark].keys():
                results=[]
                l2_cm=[]
                l2_ch=[]
                l2_miss_rate=[]
                avg_l2_miss_rate=[]
                chunk_sizes=[]
                real_block_sizes=[]
                block_sizes=[]
                grain_sizes=[]
                bl=1          # ordinal index used as the "block size" axis value
                for b in d_hpx[node][benchmark][th].keys():
                    for c in d_hpx[node][benchmark][th][b].keys():
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            # b is "rows-cols"; clip the block to the matrix and
                            # round the column count up to the SIMD width.
                            b_r=int(b.split('-')[0])
                            b_c=int(b.split('-')[1])
                            rest1=b_r%simdsize
                            rest2=b_c%simdsize
                            if b_r>m:
                                b_r=m
                            if b_c>m:
                                b_c=m
                            if b_c%simdsize!=0:
                                b_c=b_c+simdsize-b_c%simdsize
                            equalshare1=math.ceil(m/b_r)   # blocks per column
                            equalshare2=math.ceil(m/b_c)   # blocks per row
                            num_blocks=equalshare1*equalshare2
                            num_elements_uncomplete=0
                            if b_c<m:
                                num_elements_uncomplete=(m%b_c)*b_r
                            # per-block work estimate; depends on the benchmark
                            mflop=0
                            if benchmark=='dmatdmatadd':
                                mflop=b_r*b_c
                            elif benchmark=='dmatdmatdmatadd':
                                mflop=b_r*b_c*2
                            else:
                                mflop=b_r*b_c*(2*m)
                            num_elements=[mflop]*num_blocks
                            if num_elements_uncomplete:
                                # last block of each block-row is a partial block
                                for j in range(1,equalshare1+1):
                                    num_elements[j*equalshare2-1]=num_elements_uncomplete
                            data_type=8
                            # grain size = work in the first chunk of c blocks
                            grain_size=sum(num_elements[0:c])
                            num_mat=3
                            if benchmark=='dmatdmatdmatadd':
                                num_mat=4
                            cost=c*mflop*num_mat/data_type
                            grain_sizes.append(grain_size)
                            chunk_sizes.append(c)
                            block_sizes.append(bl)
                            if b not in real_block_sizes:
                                real_block_sizes.append(b)
#                            block_sizes.append(str(b_r)+'-'+str(b_c))
                            results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                            l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                            l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
                            ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
                            l2_miss_rate.append(ind_miss)
                            avg_l2_miss_rate.append(sum(ind_miss)/th)
                    bl=bl+1
                y=block_sizes
                x=grain_sizes
                z=results
                if not animation:
                    fig = plt.figure(i)
                    ax = fig.add_subplot(1,1,1, projection='3d')
                    triang = mtri.Triangulation(x, y)
                    ax.plot_trisurf(triang, z, cmap='jet')
                    ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                    ax.view_init(elev=10, azim=-110)
                    ax.set_xlabel('Grain size')
                    ax.set_ylabel('Block size')
                    ax.set_zlabel('Mflops')
                    plt.title(benchmark+' matrix size:'+str(m)+' '+str(th)+' threads')
                    plt.savefig(pp, format='pdf',bbox_inches='tight')
                    print('')
                    i=i+1
                else:
#                    surf=ax.plot_trisurf(y, x, z, cmap=plt.cm.viridis, linewidth=0.2)
#                    fig.colorbar( surf, shrink=0.5, aspect=5)
#                    ax.view_init(10, 60)
#                    plt.show()
                    # one PNG frame per 10 degrees of azimuth, for a gif
                    for angle in range(0,360,10):
                        fig = plt.figure(i)
                        ax = fig.gca(projection='3d')
                        triang = mtri.Triangulation(x, y)
                        ax.plot_trisurf(triang, z, cmap='jet')
                        ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                        ax.view_init(elev=10, azim=angle)
                        ax.set_xlabel('Grain size')
                        ax.set_ylabel('Block size')
                        ax.set_zlabel('Mflops')
                        plt.title(benchmark+' matrix size:'+str(m)+' '+str(th)+' threads')
                        filename='/home/shahrzad/repos/Blazemark/results/step_'+str(angle)+'.png'
                        plt.savefig(filename, dpi=96)
                        plt.gca()
        if not animation:
            plt.show()
        pp.close()
animation=0
#mflops based on num_threads and grain size
# Same structure as the previous section, but the y axis is the thread
# count (1..8) instead of block size; points from all block/chunk
# combinations are pooled per matrix size.
for node in ['marvin', 'trillian']:
    for benchmark in benchmarks:
#        pp = PdfPages(perf_directory+'/3d_plot_'+benchmark+'_'+node+'.pdf')
        for m in mat_sizes[benchmark]:
            threads=[]
            results=[]
            chunk_sizes=[]
            real_block_sizes=[]
            block_sizes=[]
            grain_sizes=[]
            for th in range(1,9): #d_hpx[node][benchmark].keys():
                bl=1
                for b in d_hpx[node][benchmark][th].keys():
                    for c in d_hpx[node][benchmark][th][b].keys():
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            # clip block to matrix, SIMD-align columns
                            b_r=int(b.split('-')[0])
                            b_c=int(b.split('-')[1])
                            rest1=b_r%simdsize
                            rest2=b_c%simdsize
                            if b_r>m:
                                b_r=m
                            if b_c>m:
                                b_c=m
                            if b_c%simdsize!=0:
                                b_c=b_c+simdsize-b_c%simdsize
                            equalshare1=math.ceil(m/b_r)
                            equalshare2=math.ceil(m/b_c)
                            num_blocks=equalshare1*equalshare2
                            num_elements_uncomplete=0
                            if b_c<m:
                                num_elements_uncomplete=(m%b_c)*b_r
                            mflop=0
                            if benchmark=='dmatdmatadd':
                                mflop=b_r*b_c
                            elif benchmark=='dmatdmatdmatadd':
                                mflop=b_r*b_c*2
                            else:
                                mflop=b_r*b_c*(2*m)
                            num_elements=[mflop]*num_blocks
                            if num_elements_uncomplete:
                                for j in range(1,equalshare1+1):
                                    num_elements[j*equalshare2-1]=num_elements_uncomplete
                            data_type=8
                            grain_size=sum(num_elements[0:c])
                            num_mat=3
                            if benchmark=='dmatdmatdmatadd':
                                num_mat=4
                            cost=c*mflop*num_mat/data_type
                            grain_sizes.append(grain_size)
                            chunk_sizes.append(c)
                            block_sizes.append(bl)
                            threads.append(th)
                            if b not in real_block_sizes:
                                real_block_sizes.append(b)
#                            block_sizes.append(str(b_r)+'-'+str(b_c))
                            results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                    bl=bl+1
            y=threads
            x=grain_sizes
            z=results
            if not animation:
                fig = plt.figure(i)
                ax = fig.add_subplot(1,1,1, projection='3d')
                triang = mtri.Triangulation(x, y)
                ax.plot_trisurf(triang, z, cmap='jet')
                ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                ax.view_init(elev=10, azim=-110)
                ax.set_xlabel('Grain size')
#                ax.set_xticks(10**np.arange(1,7))
#                ax.set_xticklabels(10**np.arange(1,7))
                ax.set_ylabel('#cores')
                ax.zaxis.set_rotate_label(False)
                ax.set_zlabel('Mflops',rotation=90)
                # stretch x/y to make the projection less cramped
                ax.get_proj = lambda: np.dot(Axes3D.get_proj(ax), np.diag([1.18, 1.1, 1, 1]))
#                plt.title(node+' '+benchmark.upper()+' matrix size:'+str(int(m))+'x'+str(int(m)))
                # NOTE(review): fixed output path -- this file is rewritten on
                # every m/benchmark/node iteration; only the last plot survives.
                plt.savefig('/home/shahrzad/src/Dissertation/images/fig2.png', dpi=300)
                print('')
                i=i+1
            else:
#                surf=ax.plot_trisurf(y, x, z, cmap=plt.cm.viridis, linewidth=0.2)
#                fig.colorbar( surf, shrink=0.5, aspect=5)
#                ax.view_init(10, 60)
#                plt.show()
                for angle in range(0,360,10):
                    fig = plt.figure(i)
                    ax = fig.gca(projection='3d')
                    triang = mtri.Triangulation(x, y)
                    ax.plot_trisurf(triang, z, cmap='jet')
                    ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                    ax.view_init(elev=10, azim=angle)
                    ax.set_xlabel('Grain size')
                    ax.set_ylabel('#cores')
                    ax.set_zlabel('Mflops',rotation=90)
                    ax.zaxis.set_rotate_label(False)
#                    ax.set_yticks(d_hpx[node][benchmark].keys())
#                    ax.set_xticklabels(d_hpx[node][benchmark].keys())
                    ax.get_proj = lambda: np.dot(Axes3D.get_proj(ax), np.diag([1.18, 1.1, 1, 1]))
#                    plt.title(node+' '+benchmark+' matrix size:'+str(m))
                    filename='/home/shahrzad/repos/Blazemark/results/3d/'+node+'_'+benchmark+'_'+str(m)+'_step_'+str(angle)+'.png'
                    plt.savefig(filename, dpi=300)
                    plt.gca()
        if not animation:
            plt.show()
        # NOTE(review): the PdfPages open above is commented out, so this
        # closes the stale `pp` from the previous section -- verify intent.
        pp.close()
#plot all data mflops based on grain_size and num_threads
# Pools ALL matrix sizes of a benchmark into one 3-d plot; x is
# log10(grain size), y is thread count, z is Mflops.  m_sizes tracks which
# matrix size produced each point (used for coloring in animation mode).
for node in ['marvin', 'trillian']:
    for benchmark in benchmarks:
#        pp = PdfPages(perf_directory+'/3d_plot_'+benchmark+'_'+node+'.pdf')
        threads=[]
        results=[]
        grain_sizes=[]
        m_sizes=[]
        for m in mat_sizes[benchmark]:
            chunk_sizes=[]
            real_block_sizes=[]
            block_sizes=[]
            for th in range(1,9):#d_hpx[node][benchmark].keys():
                bl=1
                for b in d_hpx[node][benchmark][th].keys():
                    for c in d_hpx[node][benchmark][th][b].keys():
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            # clip block to matrix, SIMD-align columns
                            b_r=int(b.split('-')[0])
                            b_c=int(b.split('-')[1])
                            rest1=b_r%simdsize
                            rest2=b_c%simdsize
                            if b_r>m:
                                b_r=m
                            if b_c>m:
                                b_c=m
                            if b_c%simdsize!=0:
                                b_c=b_c+simdsize-b_c%simdsize
                            equalshare1=math.ceil(m/b_r)
                            equalshare2=math.ceil(m/b_c)
                            num_blocks=equalshare1*equalshare2
                            num_elements_uncomplete=0
                            if b_c<m:
                                num_elements_uncomplete=(m%b_c)*b_r
                            mflop=0
                            if benchmark=='dmatdmatadd':
                                mflop=b_r*b_c
                            elif benchmark=='dmatdmatdmatadd':
                                mflop=b_r*b_c*2
                            else:
                                mflop=b_r*b_c*(2*m)
                            num_elements=[mflop]*num_blocks
                            if num_elements_uncomplete:
                                for j in range(1,equalshare1+1):
                                    num_elements[j*equalshare2-1]=num_elements_uncomplete
                            data_type=8
                            grain_size=sum(num_elements[0:c])
                            num_mat=3
                            if benchmark=='dmatdmatdmatadd':
                                num_mat=4
                            cost=c*mflop*num_mat/data_type
                            # log scale here (unlike the earlier sections)
                            grain_sizes.append(np.log10(grain_size))
                            chunk_sizes.append(c)
                            block_sizes.append(bl)
                            threads.append(th)
                            m_sizes.append(m)
                            if b not in real_block_sizes:
                                real_block_sizes.append(b)
#                            block_sizes.append(str(b_r)+'-'+str(b_c))
                            results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                    bl=bl+1
        y=threads
        x=grain_sizes
        z=results
        if not animation:
            NUM_COLORS = len(mat_sizes[benchmark])
            cm = plt.get_cmap('gist_rainbow')
            fig = plt.figure(i)
            ax = fig.add_subplot(1,1,1, projection='3d')
            triang = mtri.Triangulation(x, y)
            ax.plot_trisurf(triang, z, cmap='jet')
            # n (normalized matrix size) is computed but unused here --
            # the scatter below colors everything black.
            n=np.array(m_sizes)
            n=(n-np.min(n))/(np.max(n)-np.min(n))
            ax.scatter(x,y,z, marker='.', s=10, c='black', alpha=0.5,cmap=cm)
            ax.view_init(elev=10, azim=170)
            ax.set_xlabel('Grain size')
            ax.set_ylabel('#cores')
            ax.set_zlabel('Mflops')
            plt.title(benchmark)
#            plt.savefig(pp, format='pdf',bbox_inches='tight')
            print('')
            i=i+1
        else:
#            surf=ax.plot_trisurf(y, x, z, cmap=plt.cm.viridis, linewidth=0.2)
#            fig.colorbar( surf, shrink=0.5, aspect=5)
#            ax.view_init(10, 60)
#            plt.show()
            for angle in range(0,360,10):
                fig = plt.figure(i)
                ax = fig.gca(projection='3d')
                NUM_COLORS = len(mat_sizes[benchmark])
                triang = mtri.Triangulation(x, y)
                n=np.array(m_sizes)
#                ax.plot_trisurf(triang, z, cmap='jet')
                ax.scatter(x,y,z, marker='.', s=10, c=n,alpha=0.5,cmap=plt.get_cmap('rainbow'))
                ax.view_init(elev=10, azim=angle)
                ax.set_xlabel('Grain size')
                ax.set_ylabel('#cores')
                ax.set_zlabel('Mflops')
                ax.set_yticks(np.arange(1,9).tolist())
                # NOTE(review): set_xticklabels right after set_yticks looks
                # like it was meant to be set_yticklabels -- confirm.
                ax.set_xticklabels(np.arange(1,9).tolist())
                ax.set_zlabel('Mflops',rotation=90)
                ax.zaxis.set_rotate_label(False)
#                plt.title(benchmark)
                filename='/home/shahrzad/repos/Blazemark/results/3d/'+'all_'+node+'_'+benchmark+'_step_'+str(angle)+'.png'
                plt.savefig(filename, dpi=300)
                plt.gca()
# Input directories for the CSV export below.
hpx_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/marvin/'
hpx_dir1='/home/shahrzad/repos/Blazemark/data/matrix/09-15-2019/'
hpx_dir2='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/trillian/'
hpx_dir3='/home/shahrzad/repos/Blazemark/results/new_threads/'
hpx_dir4='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/work_stealing_off'
hpx_dir5='/home/shahrzad/repos/Blazemark/data/matrix/c7/splittable/all_cores/spt_min_0'
hpx_dir6='/home/shahrzad/repos/Blazemark/data/matrix/c7/splittable/idle_cores/2'
# NOTE(review): three alternative dataset selections; each call overwrites
# the previous result, so when run top-to-bottom only the last one
# ([hpx_dir4]) is in effect -- these look like interactive (cell-by-cell)
# alternatives.
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([hpx_dir5])
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([hpx_dir,hpx_dir1,hpx_dir2,hpx_dir5,hpx_dir6])
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([hpx_dir4])
simdsize=4
# mid-file imports: harmless re-imports kept for interactive execution
import math
import csv
# Flat CSV with one row per (node, benchmark, threads, block, chunk, size)
# measurement; header below defines the schema consumed downstream.
f=open('/home/shahrzad/repos/Blazemark/data/data_perf_all.csv','w')
f_writer=csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
f_writer.writerow(['runtime','node','benchmark','matrix_size','num_threads','block_size_row','block_size_col','num_elements','work_per_core','w1','w2','w3','w4','w5','w6','w7','w8','chunk_size','grain_size','num_blocks','num_blocks/chunk_size','num_elements*chunk_size','num_blocks/num_threads','num_blocks/(chunk_size*(num_threads-1))','L1cache','L2cache','L3cache','cache_line','set_associativity','datatype','cost','simd_size','execution_time','num_tasks','mflops','include'])
node_type=0
# Export every HPX measurement (threads <= 8) as one CSV row, together with
# derived features (grain size, per-core work, cost, cache parameters).
for node in d_hpx.keys():
    # per-node hardware constants (bytes / counts, stored as strings for CSV)
    if node=='marvin' or node=='marvin_old':
        L1cache='32768'
        L2cache='262144'
        L3cache='20971520'
        cache_line='8'
        set_associativity='512'
        simdsize=4
    elif node=='trillian':
        L1cache='65536'
        L2cache='2097152'
        L3cache='6291456'
        cache_line='16'
        simdsize=4
        set_associativity='131072'
    elif node=='medusa':
        L1cache='32768'
        L2cache='1048576'
        L3cache='28835840'
        cache_line='64'
        set_associativity='16'
        simdsize=8
    benchmark_type=0
    for benchmark in d_hpx[node].keys():
        all_data=[]
        for th in [th for th in d_hpx[node][benchmark].keys() if th<=8]:
            for b in d_hpx[node][benchmark][th].keys():
                for c in d_hpx[node][benchmark][th][b].keys():
                    for m in mat_sizes[benchmark]:
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            # a few known-bad configurations are excluded
                            if not(benchmark=='dmatdmatadd' and (b=='64-64' or b=='8-1024' or b=='4-64')):
                                r=d_hpx[node][benchmark][th][b][c]['mflops'][k]
                                # clip block to matrix, SIMD-align columns
                                b_r=int(b.split('-')[0])
                                b_c=int(b.split('-')[1])
                                if b_r>m:
                                    b_r=m
                                if b_c>m:
                                    b_c=m
                                if b_c%simdsize!=0:
                                    b_c=b_c+simdsize-b_c%simdsize
                                equalshare1=math.ceil(m/b_r)
                                equalshare2=math.ceil(m/b_c)
                                num_blocks=equalshare1*equalshare2
                                aligned_m=m
                                if m%simdsize!=0:
                                    aligned_m=m+simdsize-m%simdsize
                                # num_blocks per chunk per "other" thread
                                if th==1:
                                    ratio=0
                                else:
                                    ratio=str(num_blocks/(c*(th-1)))
                                mflop=0
                                if benchmark=='dmatdmatadd':
                                    mflop=b_r*b_c
                                elif benchmark=='dmatdmatdmatadd':
                                    mflop=b_r*b_c*2
                                else:
                                    mflop=b_r*b_c*(2*m)
                                num_elements=[mflop]*num_blocks
                                # partial blocks at the right edge ...
                                if aligned_m%b_c!=0:
                                    for j in range(1,equalshare1+1):
                                        if benchmark=='dmatdmatadd':
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r
                                        elif benchmark=='dmatdmatdmatadd':
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r*2
                                        else:
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r*(2*m)
                                # ... at the bottom edge ...
                                if m%b_r!=0:
                                    for j in range(1,equalshare2+1):
                                        if benchmark=='dmatdmatadd':
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c
                                        elif benchmark=='dmatdmatdmatadd':
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c*2
                                        else:
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c*(2*m)
                                # ... and the bottom-right corner block
                                if aligned_m%b_c!=0 and m%b_r!=0:
                                    if benchmark=='dmatdmatadd':
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)
                                    elif benchmark=='dmatdmatdmatadd':
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)*2
                                    else:
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)*(2*m)
                                data_type=8
                                grain_size=sum(num_elements[0:c])
                                num_mat=3
                                if benchmark=='dmatdmatdmatadd':
                                    num_mat=4
                                cost=c*mflop*num_mat/data_type
                                aligned_m=m
                                if m%simdsize!=0:
                                    aligned_m=m+simdsize-m%simdsize
                                # total flops of the whole benchmark run
                                if benchmark=='dmatdmatadd':
                                    mflop=(aligned_m)*m
                                elif benchmark=='dmatdmatdmatadd':
                                    mflop=2*(aligned_m)*m
                                else:
                                    mflop=2*(aligned_m)**3
                                exec_time=mflop/r
                                num_tasks=np.ceil(num_blocks/c)
                                task_sizes=[0.]*int(num_tasks)
                                wc=[0.]*8
                                # NOTE(review): these loops reuse the name `i`,
                                # clobbering the global figure counter.
                                for i in range(int(num_tasks)):
                                    task_sizes[i]=sum(num_elements[i*c:(i+1)*c])
                                # round-robin task assignment -> work per core
                                for i in range(th):
                                    wc[i]=sum([task_sizes[j] for j in range(len(task_sizes)) if j%th==i])
                                work_per_core=max(wc)
                                # exclude configs whose working set exceeds L2
                                include=1
                                if num_mat*b_r*b_c*8>int(L2cache):
                                    include=0
                                f_writer.writerow(['hpx',node,benchmark,str(m),str(th),b.split('-')[0],
                                                   b.split('-')[1], str(b_r * b_c), str(work_per_core),
                                                   str(wc[0]),str(wc[1]),str(wc[2]),str(wc[3]),
                                                   str(wc[4]),str(wc[5]),str(wc[6]),str(wc[7]),
                                                   str(c),str(grain_size),str(num_blocks), str(num_blocks/c),
                                                   str(b_r * b_c*c),str(num_blocks/th),ratio,L1cache,L2cache,L3cache,cache_line,set_associativity,str(data_type),str(cost),str(simdsize),str(exec_time),str(num_tasks),r,str(include)])
        benchmark_type+=1
    node_type+=1
f.close()
# Append the OpenMP measurements to the same CSV written by the HPX export
# above (same schema minus the trailing 'include' column -- NOTE(review):
# the openmp rows have one column fewer than the header; confirm downstream
# consumers tolerate this).
openmp_dir='/home/shahrzad/repos/Blazemark/data/matrix/06-13-2019/openmp/'
(d_openmp, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat([openmp_dir])
simdsize=4
import math
import csv
f=open('/home/shahrzad/repos/Blazemark/data/data_perf_all.csv','a')
f_writer=csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
node_type=0
for node in d_openmp.keys():
    # per-node hardware constants (stored as strings for CSV)
    if node=='marvin':
        L1cache='32768'
        L2cache='262144'
        L3cache='20971520'
        cache_line='8'
        set_associativity='512'
        simdsize=4
    elif node=='trillian':
        L1cache='65536'
        L2cache='2097152'
        L3cache='6291456'
        cache_line='16'
        simdsize=4
        set_associativity='131072'
    elif node=='medusa':
        L1cache='32768'
        L2cache='1048576'
        L3cache='28835840'
        cache_line='64'
        set_associativity='16'
        simdsize=8
    benchmark_type=0
    for benchmark in d_openmp[node].keys():
        all_data=[]
        # BUG FIX: the original iterated d_hpx[node][benchmark] here, i.e.
        # the HPX dict, while every other access in this loop uses d_openmp.
        # That silently skips thread counts missing from d_hpx and raises
        # KeyError for nodes/benchmarks present only in the OpenMP data.
        for th in [th for th in d_openmp[node][benchmark].keys() if th<=8]:
            for b in d_openmp[node][benchmark][th].keys():
                for c in d_openmp[node][benchmark][th][b].keys():
                    for m in mat_sizes[benchmark]:
                        k=d_openmp[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_openmp[node][benchmark][th][b][c].keys() and d_openmp[node][benchmark][th][b][c]['mflops'][k]:
                            # same known-bad configurations excluded as for HPX
                            if not(benchmark=='dmatdmatadd' and (b=='64-64' or b=='8-1024' or b=='4-64')):
                                r=d_openmp[node][benchmark][th][b][c]['mflops'][k]
                                # clip block to matrix, SIMD-align columns
                                b_r=int(b.split('-')[0])
                                b_c=int(b.split('-')[1])
                                if b_r>m:
                                    b_r=m
                                if b_c>m:
                                    b_c=m
                                if b_c%simdsize!=0:
                                    b_c=b_c+simdsize-b_c%simdsize
                                equalshare1=math.ceil(m/b_r)
                                equalshare2=math.ceil(m/b_c)
                                num_blocks=equalshare1*equalshare2
                                aligned_m=m
                                if m%simdsize!=0:
                                    aligned_m=m+simdsize-m%simdsize
                                if th==1:
                                    ratio=0
                                else:
                                    ratio=str(num_blocks/(c*(th-1)))
                                # per-block flop count
                                mflop=0
                                if benchmark=='dmatdmatadd':
                                    mflop=b_r*b_c
                                elif benchmark=='dmatdmatdmatadd':
                                    mflop=b_r*b_c*2
                                else:
                                    mflop=b_r*b_c*(2*m)
                                num_elements=[mflop]*num_blocks
                                # partial blocks: right edge, bottom edge, corner
                                if aligned_m%b_c!=0:
                                    for j in range(1,equalshare1+1):
                                        if benchmark=='dmatdmatadd':
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r
                                        elif benchmark=='dmatdmatdmatadd':
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r*2
                                        else:
                                            num_elements[j*equalshare2-1]=(aligned_m%b_c)*b_r*(2*m)
                                if m%b_r!=0:
                                    for j in range(1,equalshare2+1):
                                        if benchmark=='dmatdmatadd':
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c
                                        elif benchmark=='dmatdmatdmatadd':
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c*2
                                        else:
                                            num_elements[(equalshare1-1)*equalshare2+j-1]=(m%b_r)*b_c*(2*m)
                                if aligned_m%b_c!=0 and m%b_r!=0:
                                    if benchmark=='dmatdmatadd':
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)
                                    elif benchmark=='dmatdmatdmatadd':
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)*2
                                    else:
                                        num_elements[-1]=(m%b_r)*(aligned_m%b_c)*(2*m)
                                data_type=8
                                grain_size=sum(num_elements[0:c])
                                num_mat=3
                                if benchmark=='dmatdmatdmatadd':
                                    num_mat=4
                                cost=c*mflop*num_mat/data_type
                                # total flops of the whole benchmark run
                                if benchmark=='dmatdmatadd':
                                    mflop=(aligned_m)*m
                                elif benchmark=='dmatdmatdmatadd':
                                    mflop=2*(aligned_m)*m
                                else:
                                    mflop=2*(aligned_m)**3
                                exec_time=mflop/r
                                num_tasks=np.ceil(num_blocks/c)
                                task_sizes=[0.]*int(num_tasks)
                                wc=[0.]*8
                                # NOTE(review): `i` shadows the global figure counter
                                for i in range(int(num_tasks)):
                                    task_sizes[i]=sum(num_elements[i*c:(i+1)*c])
                                for i in range(th):
                                    wc[i]=sum([task_sizes[j] for j in range(len(task_sizes)) if j%th==i])
                                work_per_core=max(wc)
                                f_writer.writerow(['openmp',node,benchmark,str(m),str(th),b.split('-')[0],
                                                   b.split('-')[1], str(b_r * b_c), str(work_per_core),
                                                   str(wc[0]),str(wc[1]),str(wc[2]),str(wc[3]),
                                                   str(wc[4]),str(wc[5]),str(wc[6]),str(wc[7]),
                                                   str(c),
                                                   str(grain_size),str(num_blocks), str(num_blocks/c),
                                                   str(b_r * b_c*c),str(num_blocks/th),ratio,L1cache,L2cache,L3cache,cache_line,set_associativity,str(data_type),str(cost),str(simdsize),str(exec_time),str(num_tasks),r])
        benchmark_type+=1
    node_type+=1
f.close()
# Write only the near-best configurations (within 10% of the best Mflops per
# node/benchmark/threads/matrix size) to a second CSV, and record the
# min/max grain size among them for small matrices (m<1000).
f=open('/home/shahrzad/repos/Blazemark/data/data_perf_max.csv','a')
f_writer=csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
f_writer.writerow(['node','benchmark','matrix_size','num_threads','block_size_row','block_size_col','num_elements','num_elements_uncomplete','chunk_size','grain_size','num_blocks','num_blocks/chunk_size','num_elements*chunk_size','num_blocks/num_threads','num_blocks/(chunk_size*(num_threads-1))','L1cache','L2cache','L3cache','cache_line','set_associativity','datatype','cost','mflops'])
max_results={}
min_max_grain_size={}
for node in d_hpx.keys():
    min_max_grain_size[node]={}
    if node=='marvin':
        L1cache='32768'
        L2cache='262144'
        L3cache='20971520'
        cache_line='64'
        set_associativity='512'
    elif node=='trillian':
        L1cache='65536'
        L2cache='2097152'
        L3cache='6291456'
        cache_line='64'
        set_associativity='131072'
    max_results[node]={}
    for benchmark in d_hpx[node].keys():
        min_max_grain_size[node][benchmark]={}
        max_results[node][benchmark]={}
        for th in d_hpx[node][benchmark].keys():
            min_max_grain_size[node][benchmark][th]={}
            max_results[node][benchmark][th]={}
            for m in mat_sizes[benchmark]:
                min_max_grain_size[node][benchmark][th][m]={}
                min_grain=np.inf
                max_grain=0
                max_results[node][benchmark][th][m]=[]
                # gather all (chunk, block, mflops) triples for this m
                results=[]
                chunk_sizes=[]
                bs=[]
                for b in d_hpx[node][benchmark][th].keys():
                    for c in d_hpx[node][benchmark][th][b]:
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            chunk_sizes.append(c)
                            results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                            bs.append(b)
                if len(chunk_sizes)!=0:
                    max_value=max(results)
                    for r in results:
                        # NOTE(review): results.index(r) returns the FIRST
                        # occurrence -- duplicate mflops values all map to the
                        # same (c, b) pair; confirm this is acceptable.
                        c=chunk_sizes[results.index(r)]
                        b=bs[results.index(r)]
                        max_results[node][benchmark][th][m].append((c,b,r))
                        b_r=int(b.split('-')[0])
                        b_c=int(b.split('-')[1])
                        rest1=b_r%simdsize
                        rest2=b_c%simdsize
                        if b_r>m:
                            b_r=m
                        if b_c>m:
                            b_c=m
                        if b_c%simdsize!=0:
                            b_c=b_c+simdsize-b_c%simdsize
                        equalshare1=math.ceil(m/b_r)
                        equalshare2=math.ceil(m/b_c)
                        num_blocks=equalshare1*equalshare2
                        if th==1:
                            ratio=0
                        else:
                            ratio=str(num_blocks/(c*(th-1)))
                        mflop=0
                        if benchmark=='dmatdmatadd':
                            mflop=b_r*b_c
                        elif benchmark=='dmatdmatdmatadd':
                            mflop=b_r*b_c*2
                        else:
                            mflop=b_r*b_c*(2*m)
                        data_type=8
                        num_elements_uncomplete=0
                        if b_c<m:
                            num_elements_uncomplete=(m%b_c)*b_r
                        # keep only configurations within 10% of the best
                        if max_value-r<0.1*max_value:
                            num_elements=[mflop]*num_blocks
                            if num_elements_uncomplete:
                                for j in range(1,equalshare1+1):
                                    num_elements[j*equalshare2-1]=num_elements_uncomplete
                            data_type=8
                            grain_size=sum(num_elements[0:c])
                            if m<1000:
                                if grain_size<min_grain:
                                    min_grain=grain_size
                                if grain_size>max_grain:
                                    max_grain=grain_size
                            num_mat=3
                            if benchmark=='dmatdmatdmatadd':
                                num_mat=4
                            cost=c*mflop*num_mat/data_type
                            # NOTE(review): `k` here is left over from the last
                            # iteration of the gathering loop above, so this
                            # lookup may not correspond to (b, c) -- looks like
                            # a latent bug; verify.
                            r=d_hpx[node][benchmark][th][b][c]['mflops'][k]
                            f_writer.writerow([node,benchmark,str(m),str(th),b.split('-')[0],
                                               b.split('-')[1], str(b_r * b_c),str(num_elements_uncomplete), str(c),
                                               str(grain_size),str(num_blocks), str(num_blocks/c),
                                               str(b_r * b_c*c),str(num_blocks/th),ratio,L1cache,L2cache,L3cache,cache_line,set_associativity,str(data_type),str(cost),str(r)])
                    if m<1000:
                        print('matrix size:'+str(m)+' num_threads:'+str(th)+' min_grain_size:'+str(min_grain)+' max_grain_size:'+str(max_grain))
                        min_max_grain_size[node][benchmark][th][m]['min']=min_grain
                        min_max_grain_size[node][benchmark][th][m]['max']=max_grain
f.close()
# Bar chart of min/max grain size per matrix size, one figure per
# (node, benchmark, thread count).  Reads min_max_grain_size filled above
# (only entries with m<1000 have 'min'/'max' keys -- larger sizes KeyError).
for node in d_hpx.keys():
    for benchmark in d_hpx[node].keys():
        for th in d_hpx[node][benchmark].keys():
            mins=[]
            maxs=[]
            for m in mat_sizes[benchmark]:
                mins.append(min_max_grain_size[node][benchmark][th][m]['min'])
                maxs.append(min_max_grain_size[node][benchmark][th][m]['max'])
            plt.figure(i)
            N = len(mat_sizes[benchmark])
            ind = np.arange(N)  # the x locations for the groups
            width = 1 # the width of the bars
            fig, ax = plt.subplots()
#            plt.axes([0, 0, 2, 1])
#
            # max bars drawn over min bars at the same x positions
            rects1 = ax.bar(ind, mins, width, color='r')
            rects2 = ax.bar(ind, maxs, width, color='b')
            # add some text for labels, title and axes ticks
            ax.set_ylabel('Scores')
            ax.set_title('Min and Max grain size '+str(th)+' threads')
            ax.set_xticks(ind + width / 2)
            # NOTE(review): hard-coded 5 labels regardless of N -- confirm
            ax.set_xticklabels(('G1', 'G2', 'G3', 'G4', 'G5'))
            ax.legend((rects1[0], rects2[0]), ('Min', 'Max'))
            # helper (currently unused; calls are commented out below)
            def autolabel(rects):
                """
                Attach a text label above each bar displaying its height
                """
                for rect in rects:
                    height = rect.get_height()
                    ax.text(rect.get_x() + rect.get_width()/2., 1.05*height,
                            '%d' % int(height),
                            ha='center', va='bottom')
#
#            autolabel(rects1)
#            autolabel(rects2)
            plt.show()
            i=i+1
#convert -delay 50 step_*.png animated.gif
#################################################################
#cache-miss based on block_size and matrix_size for chunk_size=1
#################################################################
c=1
animation=0
p3d=0          # 0 -> 2-d line plots; 1 -> 3-d surface/animation
#plot number of cache misses based on matrix size for a chunk size and a block size
# NOTE(review): `node` is not set in this section -- it is whatever the
# previous loop left behind; confirm that is intended.
for benchmark in benchmarks:
    for th in d_hpx[node][benchmark].keys():
#        pp = PdfPages(perf_directory+'/bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
        results=[]
        bl=1
        block_sizes=[]
        m_sizes=[]
        avg_l2_miss_rate=[]
        real_block_sizes=[]
        # NOTE(review): l2_cm/l2_ch (and chunk_sizes) are appended below but
        # never re-initialized in this section -- they keep growing from the
        # earlier sections' contents.
        for b in d_hpx[node][benchmark][th].keys():
            for m in mat_sizes[benchmark]:
                k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                    b_r=int(b.split('-')[0])
                    b_c=int(b.split('-')[1])
                    rest1=b_r%simdsize
                    rest2=b_c%simdsize
                    if b_r>m:
                        b_r=m
                    if b_c>m:
                        b_c=m
                    if b_c%simdsize!=0:
                        b_c=b_c+simdsize-b_c%simdsize
                    equalshare1=math.ceil(m/b_r)
                    equalshare2=math.ceil(m/b_c)
                    chunk_sizes.append(c)
                    # NOTE(review): unlike other sections, both *add variants
                    # share one formula here (no *2 for dmatdmatdmatadd)
                    mflop=0
                    if 'add' in benchmark:
                        mflop=b_r*b_c
                    else:
                        mflop=b_r*b_c*(2*m)
                    m_sizes.append(m)
                    block_sizes.append(bl)
                    real_block_sizes.append(b)
                    results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                    l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                    l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
                    ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
                    avg_l2_miss_rate.append(sum(ind_miss)/th)
            bl=bl+1
        y=block_sizes
        x=m_sizes
        z=avg_l2_miss_rate
        if p3d:
            if not animation:
                fig = plt.figure(i)
                ax = fig.add_subplot(1,1,1, projection='3d')
                triang = mtri.Triangulation(x, y)
                ax.plot_trisurf(triang, z, cmap='jet')
                ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                ax.view_init(elev=10, azim=110)
                ax.set_xlabel('Matrix size')
                ax.set_ylabel('Block size')
                ax.set_zlabel('L2 cache miss rate')
                plt.title(benchmark+' matrix size:'+str(m)+' '+str(th)+' threads')
#                plt.savefig(pp, format='pdf',bbox_inches='tight')
                print('')
                i=i+1
            else:
#                surf=ax.plot_trisurf(y, x, z, cmap=plt.cm.viridis, linewidth=0.2)
#                fig.colorbar( surf, shrink=0.5, aspect=5)
#                ax.view_init(10, 60)
#                plt.show()
                for angle in range(0,360,10):
                    fig = plt.figure(i)
                    ax = fig.gca(projection='3d')
                    triang = mtri.Triangulation(x, y)
                    ax.plot_trisurf(triang, z, cmap='jet')
                    ax.scatter(x,y,z, marker='.', s=10, c="black", alpha=0.5)
                    ax.view_init(elev=10, azim=angle)
                    ax.set_xlabel('Grain size')
                    ax.set_ylabel('Block size')
                    ax.set_zlabel('L2 cache miss rate')
                    plt.title(benchmark+' chunk size:1 '+str(th)+' threads')
                    filename='/home/shahrzad/repos/Blazemark/results/png/step_'+str(angle)+'.png'
                    plt.savefig(filename, dpi=96)
                    plt.gca()
            if not animation:
                plt.show()
            # NOTE(review): pp was never opened in this section (commented
            # out above) -- this closes a stale handle; verify intent.
            pp.close()
        else:
            plt.figure(i)
            plt.plot(real_block_sizes,z, label=str(th)+' threads matrix_size:'+str(m))
            plt.ylabel('l2_cache_misse rate')
            plt.xlabel('block size')
            plt.title(benchmark+' '+str(th)+' threads')
            plt.grid(True, 'both')
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
#################################################################
#cache-miss based on block_size and matrix_size for all chunk_sizes
#################################################################
# Defaults for running a single case interactively; the loops below
# immediately overwrite benchmark/th/b (node stays 'marvin', c stays 1).
c=1
b='4-1024'
node='marvin'
benchmark='dmatdmatadd'
th=4
#plot number of cache misses based on matrix size for a chunk size and a block size
# Per-core L2 miss rate vs matrix size: one figure per core (figures i..i+th-1),
# one curve per block size.
for benchmark in benchmarks:
    for th in d_hpx[node][benchmark].keys():
#        pp = PdfPages(perf_directory+'/bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
        results=[]
        for b in d_hpx[node][benchmark][th].keys():
            l2_cm=[]
            l2_ch=[]
            l2_miss_rate=[]
            for m in mat_sizes[benchmark]:
                chunk_sizes=[]
                grain_sizes=[]
                k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                    b_r=int(b.split('-')[0])
                    b_c=int(b.split('-')[1])
                    rest1=b_r%simdsize
                    rest2=b_c%simdsize
                    if b_r>m:
                        b_r=m
                    if b_c>m:
                        b_c=m
                    if b_c%simdsize!=0:
                        b_c=b_c+simdsize-b_c%simdsize
                    equalshare1=math.ceil(m/b_r)
                    equalshare2=math.ceil(m/b_c)
                    chunk_sizes.append(c)
                    num_blocks=equalshare1*equalshare2
                    num_elements_uncomplete=0
                    if b_c<m:
                        num_elements_uncomplete=(m%b_c)*b_r
                    mflop=0
                    if benchmark=='dmatdmatadd':
                        mflop=b_r*b_c
                    elif benchmark=='dmatdmatdmatadd':
                        mflop=b_r*b_c*2
                    else:
                        mflop=b_r*b_c*(2*m)
                    num_elements=[mflop]*num_blocks
                    if num_elements_uncomplete:
                        for j in range(1,equalshare1+1):
                            num_elements[j*equalshare2-1]=num_elements_uncomplete
                    data_type=8
                    grain_size=sum(num_elements[0:c])
                    num_mat=3
                    if benchmark=='dmatdmatdmatadd':
                        num_mat=4
                    cost=c*mflop*num_mat/data_type
                    grain_sizes.append(grain_size)
                    results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                    l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                    l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
                    l2_miss_rate.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)])
            # chunk_sizes was reset per m, so this only reflects the LAST
            # matrix size; the plotted label likewise uses stale
            # equalshare1/equalshare2 from the last hit -- NOTE(review).
            if len(chunk_sizes)!=0:
                for t in range(th):
                    tl=[l[t] for l in l2_miss_rate]
                    plt.figure(i+t)
                    plt.plot(mat_sizes[benchmark], tl, label=str(th)+' threads block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2)+' block size '+str(b)+' core '+str(t))
                    plt.ylabel('l2_cache_misse rate')
                    plt.xlabel('matrix size')
                    plt.xscale('log')
                    plt.title(benchmark)
                    plt.grid(True, 'both')
                    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            i=i+th
#################################################################
#cache-miss based on block_size and matrix_size for all chunk_sizes
#################################################################
# Average L2 miss rate vs grain size; one figure per (node, benchmark, th)
# with one curve per matrix size, points sorted by grain size.
for node in d_hpx.keys():
    #plot number of cache misses based on matrix size for a chunk size and a block size
    for benchmark in d_hpx[node].keys():
        for th in d_hpx[node][benchmark].keys():
#            pp = PdfPages(perf_directory+'/bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
            results=[]
            for m in mat_sizes[benchmark]:
                l2_cm=[]
                l2_ch=[]
                l2_miss_rate=[]
                grain_sizes=[]
                avg_l2_miss_rate=[]
                for b in d_hpx[node][benchmark][th].keys():
                    for c in d_hpx[node][benchmark][th][b].keys():
                        k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
                        if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
                            b_r=int(b.split('-')[0])
                            b_c=int(b.split('-')[1])
                            rest1=b_r%simdsize
                            rest2=b_c%simdsize
                            if b_r>m:
                                b_r=m
                            if b_c>m:
                                b_c=m
                            if b_c%simdsize!=0:
                                b_c=b_c+simdsize-b_c%simdsize
                            equalshare1=math.ceil(m/b_r)
                            equalshare2=math.ceil(m/b_c)
                            # NOTE(review): chunk_sizes is never reset in this
                            # section -- it keeps growing across iterations.
                            chunk_sizes.append(c)
                            num_blocks=equalshare1*equalshare2
                            num_elements_uncomplete=0
                            if b_c<m:
                                num_elements_uncomplete=(m%b_c)*b_r
                            mflop=0
                            if benchmark=='dmatdmatadd':
                                mflop=b_r*b_c
                            elif benchmark=='dmatdmatdmatadd':
                                mflop=b_r*b_c*2
                            else:
                                mflop=b_r*b_c*(2*m)
                            num_elements=[mflop]*num_blocks
                            if num_elements_uncomplete:
                                for j in range(1,equalshare1+1):
                                    num_elements[j*equalshare2-1]=num_elements_uncomplete
                            data_type=8
                            grain_size=sum(num_elements[0:c])
                            num_mat=3
                            if benchmark=='dmatdmatdmatadd':
                                num_mat=4
                            cost=c*mflop*num_mat/data_type
                            grain_sizes.append(grain_size)
                            results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
                            l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
                            l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
                            l2_miss_rate.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)])
                            ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
                            avg_l2_miss_rate.append(sum(ind_miss)/th)
                plt.figure(i)
                # sort points by grain size so the line plot is monotone in x;
                # the comprehension variable `i` is scoped to the comprehension
                # (Python 3) and does not clobber the global figure counter.
                indices=np.argsort(np.array(grain_sizes))
                plt.plot([grain_sizes[i] for i in indices], [avg_l2_miss_rate[i] for i in indices], label=str(th)+' threads matrix_size:'+str(m))
                plt.ylabel('l2_cache_misse rate')
                plt.xlabel('grain size')
                plt.xscale('log')
                plt.title(benchmark)
                plt.grid(True, 'both')
                plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
            i=i+1
# Reload the per-run PAPI counter data and reset the plot output directory.
(d_hpx, chunk_sizes, block_sizes, thr, benchmarks, mat_sizes)=create_dict_relative_norepeat_counters_onebyone(papi_directory)
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/matrix/08-07-2019/performance_counters'
# Defaults for interactive single-case runs; the loops that follow
# overwrite node/benchmark/th/m (and c via `for c in [1,2]`).
c=1
node='marvin'
benchmark='dmatdmatadd'
th=4
m=912
#plot number of cache misses based on block size for a chunk size and a matrix size
for node in d_hpx.keys():
for benchmark in benchmarks:
# pp = PdfPages(perf_directory+'/cache_miss_rate_'+benchmark+'_'+node+'.pdf')
for th in d_hpx[node][benchmark].keys():
for c in [1,2]:
results=[]
for m in mat_sizes[benchmark]:
l2_cm=[]
l2_ch=[]
l2_miss_rate=[]
avg_l2_miss_rate=[]
block_sizes=[]
grain_sizes=[]
chunk_sizes=[]
for b in d_hpx[node][benchmark][th].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
block_sizes.append(b)
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
# block_sizes.append(str(b_r)+'-'+str(b_c))
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
l2_miss_rate.append(ind_miss)
avg_l2_miss_rate.append(sum(ind_miss)/th)
if len(chunk_sizes)!=0:
plt.figure(i)
plt.axes([0, 0, 2, 1])
plt.plot(block_sizes, avg_l2_miss_rate, label='matrix size:'+str(m))
plt.ylabel('L2_cache_misse rate')
plt.xlabel('block size')
plt.title(node+' '+benchmark+' chunk_size: '+str(c)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
G=np.argsort(np.asarray(grain_sizes))
plt.figure(i+1)
plt.ylabel('L2_cache_misse rate')
plt.xlabel('grain size')
plt.title(node+' '+benchmark+' chunk_size: '+str(c)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.plot([grain_sizes[g] for g in G], [avg_l2_miss_rate[g] for g in G], label='matrix size:'+str(m), marker='+')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
xs=[grain_sizes[g] for g in G]
ys=[avg_l2_miss_rate[g] for g in G]
zs=[block_sizes[g] for g in G]
for x,y,z in zip(xs,ys,zs):
label = (z)
plt.annotate(label, # this is the text
(x,y), # this is the point to label
textcoords="offset points", # how to position the text
xytext=(0,10), # distance from text to points (x,y)
ha='center') # horizontal alignment can be left, right or center
# plt.savefig(pp, format='pdf',bbox_inches='tight')
# print('')
i=i+2
plt.show()
pp.close()
c=1
node='marvin'
benchmark='dmatdmatadd'
th=4
m=912
#plot number of cache misses based on block size for a matrix size and a chunk size
for node in d_hpx.keys():
for benchmark in benchmarks:
# pp = PdfPages(perf_directory+'/cache_miss_rate_'+benchmark+'_'+node+'.pdf')
for th in d_hpx[node][benchmark].keys():
for m in mat_sizes[benchmark]:
results=[]
for c in d_hpx[node][benchmark][1]['4-1024'].keys():
l2_cm=[]
l2_ch=[]
l2_miss_rate=[]
avg_l2_miss_rate=[]
block_sizes=[]
grain_sizes=[]
chunk_sizes=[]
for b in d_hpx[node][benchmark][th].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
block_sizes.append(b)
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
# block_sizes.append(str(b_r)+'-'+str(b_c))
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
l2_miss_rate.append(ind_miss)
avg_l2_miss_rate.append(sum(ind_miss)/th)
if len(chunk_sizes)!=0:
plt.figure(i)
plt.axes([0, 0, 2, 1])
plt.plot(block_sizes, avg_l2_miss_rate, label='chunk size:'+str(c))
plt.ylabel('L2_cache_misse rate')
plt.xlabel('block size')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
G=np.argsort(np.asarray(grain_sizes))
plt.figure(i+1)
plt.ylabel('L2_cache_misse rate')
plt.xlabel('grain size')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.plot([grain_sizes[g] for g in G], [avg_l2_miss_rate[g] for g in G], label='matrix size:'+str(m), marker='+')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
xs=[grain_sizes[g] for g in G]
ys=[avg_l2_miss_rate[g] for g in G]
zs=[block_sizes[g] for g in G]
for x,y,z in zip(xs,ys,zs):
label = (z)
plt.annotate(label, # this is the text
(x,y), # this is the point to label
textcoords="offset points", # how to position the text
xytext=(0,10), # distance from text to points (x,y)
ha='center') # horizontal alignment can be left, right or center
# plt.savefig(pp, format='pdf',bbox_inches='tight')
# print('')
i=i+2
plt.show()
pp.close()
#plot number of cache misses based on chunk size for a matrix size and a block size
for node in d_hpx.keys():
for benchmark in benchmarks:
pp = PdfPages(perf_directory+'/cache_miss_rate_grain_size_'+benchmark+'_'+node+'.pdf')
for th in d_hpx[node][benchmark].keys():
for m in mat_sizes[benchmark]:
results=[]
overall_avg_l2_miss_rate=[]
overall_grain_sizes=[]
for b in d_hpx[node][benchmark][th].keys():
l2_cm=[]
l2_ch=[]
l2_miss_rate=[]
avg_l2_miss_rate=[]
block_sizes=[]
grain_sizes=[]
chunk_sizes=[]
for c in d_hpx[node][benchmark][1]['4-1024'].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
block_sizes.append(b)
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
# block_sizes.append(str(b_r)+'-'+str(b_c))
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
l2_cm.append(d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'])
l2_ch.append([d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][l]-d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][l] for l in range(th)])
ind_miss=[d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tcm'][j]/d_hpx[node][benchmark][th][b][c]['counters'][k]['avg']['papi_tca'][j] for j in range(th)]
l2_miss_rate.append(ind_miss)
avg_l2_miss_rate.append(sum(ind_miss)/th)
overall_avg_l2_miss_rate.append(sum(ind_miss)/th)
overall_grain_sizes.append(grain_size)
if len(chunk_sizes)!=0:
plt.figure(i)
plt.axes([0, 0, 2, 1])
plt.plot(chunk_sizes, avg_l2_miss_rate, label='block size:'+str(b))
plt.ylabel('L2_cache_misse rate')
plt.xlabel('chunk size')
plt.xscale('log')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
G=np.argsort(np.asarray(grain_sizes))
plt.figure(i+1)
plt.ylabel('L2_cache_misse rate')
plt.xlabel('grain size')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.plot([grain_sizes[g] for g in G], [avg_l2_miss_rate[g] for g in G], label='block size:'+str(b), marker='+')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.figure(i+2)
plt.axes([0, 0, 2, 1])
plt.plot(grain_sizes, avg_l2_miss_rate, label='block size:'+str(b))
plt.ylabel('L2_cache_misse rate')
plt.xlabel('grain size')
plt.xscale('log')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
xs=[grain_sizes[g] for g in G]
ys=[avg_l2_miss_rate[g] for g in G]
zs=[block_sizes[g] for g in G]
for x,y,z in zip(xs,ys,zs):
label = (z)
plt.annotate(label, # this is the text
(x,y), # this is the point to label
textcoords="offset points", # how to position the text
xytext=(0,10), # distance from text to points (x,y)
ha='center') # horizontal alignment can be left, right or center
indices=np.argsort(np.asarray(overall_grain_sizes))
plt.figure('1')
plt.axes([0, 0, 2, 1])
plt.plot([overall_grain_sizes[i] for i in indices], [overall_avg_l2_miss_rate[i] for i in indices], label='matrix size:'+str(m))
plt.ylabel('L2_cache_misse rate')
plt.xlabel('chunk size')
plt.xscale('log')
plt.title(node+' '+benchmark+' matrix_size: '+str(m)+' '+str(th)+' threads')
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.savefig(pp, format='pdf',bbox_inches='tight')
print('')
i=i+4
plt.show()
pp.close()
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/06-13-2019/grain_size/'
###bath tub mflops_vs_grainsize_one-blocksize
i=1
for node in ['medusa','trillian']:#d_hpx.keys():
for benchmark in d_hpx[node].keys():
for th in d_hpx[node][benchmark].keys():
pp = PdfPages(perf_directory+'/'+node+'_bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
for m in mat_sizes[benchmark]:
plt.figure(i)
for b in d_hpx[node][benchmark][th].keys():
results=[]
chunk_sizes=[]
grain_sizes=[]
for c in d_hpx[node][benchmark][th][b].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
if len(chunk_sizes)!=0:
# plt.plot(chunk_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
plt.plot(grain_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
plt.xlabel("grain_size")
# plt.xlabel("chunk_size")
plt.ylabel('MFlops')
plt.xscale('log')
plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
print('')
plt.savefig(pp, format='pdf',bbox_inches='tight')
i=i+1
plt.show()
pp.close()
###bath tub mflops_vs_number_of_tasks_one-blocksize
i=1
for node in d_hpx.keys():
for benchmark in d_hpx[node].keys():
for th in d_hpx[node][benchmark].keys():
pp = PdfPages(perf_directory+'/'+node+'_bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'_num_tasks.pdf')
for m in mat_sizes[benchmark]:
plt.figure(i)
for b in d_hpx[node][benchmark][th].keys():
results=[]
chunk_sizes=[]
grain_sizes=[]
num_tasks=[]
for c in d_hpx[node][benchmark][th][b].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
num_tasks.append(np.ceil(num_blocks/c))
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
results.append(1/d_hpx[node][benchmark][th][b][c]['mflops'][k])
if len(chunk_sizes)!=0:
plt.figure(i)
plt.plot(grain_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
plt.xlabel("grain_size")
# plt.xlabel("chunk_size")
plt.ylabel('1/MFlops')
plt.xscale('log')
plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# plt.plot(chunk_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
plt.figure(i+1)
plt.plot(num_tasks, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
plt.xlabel("num_tasks")
# plt.xlabel("chunk_size")
plt.ylabel('1/MFlops')
plt.xscale('log')
plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
print('')
plt.savefig(pp, format='pdf',bbox_inches='tight')
i=i+1
plt.show()
pp.close()
perf_directory='/home/shahrzad/repos/Blazemark/data/performance_plots/06-13-2019/grain_size/'
###bath tub mflops_vs_chunksize_one-blocksize
i=1
for node in d_hpx.keys():
for benchmark in benchmarks:
for th in d_hpx[node][benchmark].keys():
pp = PdfPages(perf_directory+node+'_bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
for m in mat_sizes[benchmark]:
plt.figure(i)
for b in [b for b in d_hpx[node][benchmark][th] if b not in ['64-64']]:#d_hpx[node][benchmark][th].keys():
results=[]
chunk_sizes=[]
grain_sizes=[]
for c in d_hpx[node][benchmark][th][b].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
if len(chunk_sizes)!=0:
plt.scatter(chunk_sizes, results, label=str(int(th))+' threads matrix_size:'+str(int(m))+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
# plt.plot(grain_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
# plt.plot(grain_sizes, results, label='block_size:'+str(b_r)+'-'+str(int(b_c))+' num_blocks:'+str(equalshare1*equalshare2))
plt.xlabel("grain_size")
# plt.xlabel("chunk_size")
plt.ylabel('MFlops')
plt.xscale('log')
# plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.savefig('/home/shahrzad/src/Dissertation/images/fig9.png',dpi=300,bbox_inches='tight')
#
print('')
plt.savefig(pp, format='pdf',bbox_inches='tight')
i=i+1
plt.show()
pp.close()
import random
i=1
for node in d_hpx.keys():
for benchmark in benchmarks:
for m in [m for m in mat_sizes[benchmark] if m>700]: #[230., 300., 455., 690.,793.]: #
import random
pp = PdfPages(perf_directory+node+'_bath_tub_'+benchmark+'_different_matrix_sizes_'+str(th)+'.pdf')
plt.figure(i)
number_of_colors = len(mat_sizes[benchmark])
color = ["#"+''.join([random.choice('0123456789ABCDEF') for j in range(6)])
for i in range(number_of_colors)]
c_index=0
for th in range(1,9):#d_hpx[node][benchmark].keys():
results=[]
chunk_sizes=[]
grain_sizes=[]
for b in [b for b in d_hpx[node][benchmark][th] if b not in ['64-64', '4-64']]:#d_hpx[node][benchmark][th].keys():
for c in d_hpx[node][benchmark][th][b].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
chunk_sizes.append(c)
num_blocks=equalshare1*equalshare2
num_elements_uncomplete=0
if b_c<m:
num_elements_uncomplete=(m%b_c)*b_r
mflop=0
if benchmark=='dmatdmatadd':
mflop=b_r*b_c
elif benchmark=='dmatdmatdmatadd':
mflop=b_r*b_c*2
else:
mflop=b_r*b_c*(2*m)
num_elements=[mflop]*num_blocks
if num_elements_uncomplete:
for j in range(1,equalshare1+1):
num_elements[j*equalshare2-1]=num_elements_uncomplete
data_type=8
grain_size=sum(num_elements[0:c])
num_mat=3
if benchmark=='dmatdmatdmatadd':
num_mat=4
cost=c*mflop*num_mat/data_type
grain_sizes.append(grain_size)
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
# plt.plot(chunk_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
# plt.plot(grain_sizes, results, label=str(th)+' threads matrix_size:'+str(m)+' block_size:'+str(b_r)+'-'+str(b_c)+' num_blocks:'+str(equalshare1*equalshare2))
# plt.plot(grain_sizes, results, label='block_size:'+str(b_r)+'-'+str(int(b_c))+' num_blocks:'+str(equalshare1*equalshare2))
# plt.scatter(grain_sizes, results,label='matrix size:'+str(int(m)),color=color[c_index],marker='.')
plt.scatter(grain_sizes, results,label=str(int(th))+' cores',color=color[c_index],marker='.')
plt.xlabel("grain_size")
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# plt.xlabel("chunk_size")
plt.ylabel('MFlops')
plt.xscale('log')
# plt.title(benchmark)
plt.grid(True, 'both')
c_index+=1
plt.figure(i)
plt.savefig('/home/shahrzad/src/Dissertation/images/fig13.png',dpi=300,bbox_inches='tight')
#
print('')
plt.savefig(pp, format='pdf',bbox_inches='tight')
i=i+1
plt.show()
pp.close()
##########################################
#3d plot
import randomfrom mpl_toolkits import mplot3d
import randomimport numpy as np
import matplotlib.pyplot as plt
fig = plt.figure()
ax = plt.axes(projection='3d')
ax = plt.axes(projection='3d')
##################import random########################
# Data for a three-dimensional line
zline = np.linspace(0, 15, 1000)
xline = np.sin(zline)
yline = np.cos(zline)
ax.plot3D(xline, yline, zline, 'gray')
# Data for three-dimensional scattered points
zdata = 15 * np.random.random(100)
xdata = np.sin(zdata) + 0.1 * np.random.randn(100)
ydata = np.cos(zdata) + 0.1 * np.random.randn(100)
ax.scatter3D(xdata, ydata, zdata, c=zdata, cmap='Greens');
f=open('/home/shahrzad/repos/Blazemark/data/data_chunks.csv','w')
f_writer=csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
f_writer.writerow(['benchmark','matrix_size','num_threads','block_size_row','block_size_col','num_elements','chunk_size','num_blocks','mflops','num_blocks/chunk_size','num_elements*chunk_size','num_blocks/num_threads','num_blocks/(chunk_size*(num_threads-1))'])
max_results={}
all_data=[]
for benchmark in benchmarks:
max_results[benchmark]={}
for th in d_hpx[benchmark].keys():
max_results[benchmark][th]={}
for m in mat_sizes[benchmark]:
max_results[benchmark][th][m]=[]
results=[]
chunk_sizes=[]
bs=[]
for b in d_hpx[benchmark][th].keys():
for c in d_hpx[benchmark][th][b]:
k=d_hpx[benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[benchmark][th][b][c].keys() and d_hpx[benchmark][th][b][c]['mflops'][k]:
chunk_sizes.append(c)
results.append(d_hpx[benchmark][th][b][c]['mflops'][k])
bs.append(b)
if len(chunk_sizes)!=0:
max_value=max(results)
for r in results:
chunk_size=chunk_sizes[results.index(r)]
b_size=bs[results.index(r)]
max_results[benchmark][th][m].append((chunk_size,b_size,r))
b_r=int(b_size.split('-')[0])
b_c=int(b_size.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
num_blocks=equalshare1*equalshare2
if th==1:
ratio='NA'
else:
ratio=str(num_blocks/(chunk_size*(th-1)))
if benchmark=='dmatdmatadd':
all_data.append([m,th, (b_r * b_c), b_r,b_c,(chunk_size),
(num_blocks), (num_blocks/chunk_size),
(b_r * b_c*chunk_size),(num_blocks/th), r])
if max_value-r<0.1*max_value:
f_writer.writerow([benchmark,str(m),str(th),b_size.split('-')[0],
b_size.split('-')[1], str(b_r * b_c), str(chunk_size),
str(num_blocks), str(r),str(num_blocks/chunk_size),
str(b_r * b_c*chunk_size),str(num_blocks/th),ratio])
f.close()
#######just the max value
max_results={}
all_data=[]
for benchmark in benchmarks:
max_results[benchmark]={}
for th in d_hpx[benchmark].keys():
max_results[benchmark][th]={}
for m in mat_sizes[benchmark]:
max_results[benchmark][th][m]=[]
results=[]
chunk_sizes=[]
bs=[]
for b in d_hpx[benchmark][th].keys():
for c in d_hpx[benchmark][th][b]:
k=d_hpx[benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[benchmark][th][b][c].keys() and d_hpx[benchmark][th][b][c]['mflops'][k]:
chunk_sizes.append(c)
results.append(d_hpx[benchmark][th][b][c]['mflops'][k])
bs.append(b)
if len(chunk_sizes)!=0:
max_value=max(results)
r=(max_value)
chunk_size=chunk_sizes[results.index(r)]
b_size=bs[results.index(r)]
max_results[benchmark][th][m].append((chunk_size,b_size,r))
b_r=int(b_size.split('-')[0])
b_c=int(b_size.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
num_blocks=equalshare1*equalshare2
if th==1:
ratio='NA'
else:
ratio=str(num_blocks/(chunk_size*(th-1)))
if benchmark=='dmatdmatadd':
all_data.append([m,th, (b_r * b_c), b_r,b_c,(chunk_size),
(num_blocks), (num_blocks/chunk_size),
(b_r * b_c*chunk_size),(num_blocks/th), r])
##mflops_vs_chunksize_different-blocksizes
for benchmark in benchmarks:
for th in d_hpx[benchmark].keys():
# pp = PdfPages(perf_directory+'/'+benchmark+'_different_blocks_'+str(th)+'.pdf')
for m in mat_sizes[benchmark]:
plt.figure(i)
for b in [b for b in d_hpx[node][benchmark][th] if b not in ['64-64']]:
results=[]
chunk_sizes=[]
for c in d_hpx[node][benchmark][th][b]:
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
chunk_sizes.append(c)
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
if len(chunk_sizes)!=0:
b_r=int(b.split('-')[0])
b_c=int(b.split('-')[1])
rest1=b_r%simdsize
rest2=b_c%simdsize
if b_r>m:
b_r=m
if b_c>m:
b_c=m
if b_c%simdsize!=0:
b_c=b_c+simdsize-b_c%simdsize
equalshare1=math.ceil(m/b_r)
equalshare2=math.ceil(m/b_c)
plt.figure(i)
plt.plot(chunk_sizes, results, label='block_size:'+str(b)+', num_blocks:'+str(equalshare1*equalshare2))
# plt.plot(chunk_sizes, results, label=str(int(th))+' threads matrix_size:'+str(m)+' block_size:'+str(b)+' num_blocks:'+str(equalshare1*equalshare2))
plt.xlabel("chunk_size")
plt.ylabel('MFlops')
plt.xscale('log')
# plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# plt.savefig(pp, format='pdf',bbox_inches='tight')
plt.savefig('/home/shahrzad/src/Dissertation/images/fig7.png',dpi=300,bbox_inches='tight')
print('')
i=i+1
plt.show()
pp.close()
#####performance mflops_vs_matrixsize_one_block
c=1
for node in d_hpx.keys():
for benchmark in d_hpx[node].keys():
for th in d_hpx[node][benchmark].keys():
results=[]
plt.figure(i)
# pp = PdfPages(perf_directory+'/performance_'+benchmark+'_'+b+'-chunk_size_'+str(c)+'.pdf')
for m in mat_sizes[benchmark]:
for b in d_hpx[node][benchmark][th].keys():
for c in d_hpx[node][benchmark][th][b].keys():
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]!=0:
results.append(d_hpx[node][benchmark][th][b][c]['mflops'][k])
print(m,k,th,b,c)
plt.figure(i)
plt.plot(mat_sizes[benchmark], results, label=str(th)+' threads block_size:'+str(b))
plt.xlabel("matrix_size")
plt.ylabel('MFlops')
plt.xscale('log')
plt.title(benchmark)
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# plt.savefig(pp, format='pdf',bbox_inches='tight')
print('')
i=i+1
plt.show()
pp.close()
#####performance mflops_vs_matrixsize_different_blocksizes chunk_size=1
for c in [7]:
for benchmark in benchmarks:
pp = PdfPages(perf_directory+'/performance_'+benchmark+'_different_blocks-chunk_size_'+str(c)+'.pdf')
for th in d_hpx[benchmark].keys():
plt.figure(i)
for b in block_sizes[benchmark]:
results=[]
for m in mat_sizes[benchmark]:
k=d_hpx[benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[benchmark][th][b][c].keys() :
results.append(d_hpx[benchmark][th][b][c]['mflops'][k])
plt.figure(i)
plt.plot(mat_sizes[benchmark], results, label=str(th)+' threads block_size:'+str(b))
plt.xlabel("matrix_size")
plt.ylabel('MFlops')
plt.xscale('log')
plt.title(benchmark + ' chunk_size:'+str(c))
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.savefig(pp, format='pdf',bbox_inches='tight')
print('')
i=i+1
plt.show()
pp.close()
#####performance different benchmarks
for th in d_hpx[node][benchmark].keys():
for b in ['4-128', '4-256']:
# pp = PdfPages(perf_directory+'/performance_dmatdmatadd_dmatdmatdmatadd_'+b+'_'+str(th)+'.pdf')
plt.figure(i)
s=''
for benchmark in benchmarks:
results=[]
for m in mat_sizes[benchmark]:
c=1
k=d_hpx[node][benchmark][th][b][c]['size'].index(m)
if 'mflops' in d_hpx[node][benchmark][th][b][c].keys() and d_hpx[node][benchmark][th][b][c]['mflops'][k]:
results.append((2*m**2)/d_hpx[node][benchmark][th][b][c]['mflops'][k])
else:
results.append(0)
plt.figure(i)
plt.plot(mat_sizes[benchmark], results, label=benchmark+' '+str(th)+' threads block_size:'+str(b))
plt.xlabel("matrix_size")
plt.ylabel('MFlops')
plt.xscale('log')
s+=benchmark+'-'
plt.title(s[:-1])
plt.grid(True, 'both')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# plt.savefig(pp, format='pdf',bbox_inches='tight')
# print('')
i=i+1
plt.show()
pp.close()
| 50.051948
| 479
| 0.447483
| 17,544
| 154,160
| 3.74601
| 0.029184
| 0.01619
| 0.052724
| 0.055356
| 0.92462
| 0.908125
| 0.886229
| 0.861458
| 0.843807
| 0.8236
| 0
| 0.024977
| 0.414621
| 154,160
| 3,080
| 480
| 50.051948
| 0.703287
| 0.079878
| 0
| 0.87016
| 0
| 0.005866
| 0.080663
| 0.021587
| 0.000391
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007431
| null | null | 0.007822
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
968cdce5aa0ef47c65e9085343cceec14336b70d
| 197
|
py
|
Python
|
reinvent_scoring/scoring/score_components/rocs/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | null | null | null |
reinvent_scoring/scoring/score_components/rocs/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | 2
|
2021-11-01T23:19:42.000Z
|
2021-11-22T23:41:39.000Z
|
reinvent_scoring/scoring/score_components/rocs/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | 2
|
2021-11-18T13:14:22.000Z
|
2022-03-16T07:52:57.000Z
|
from reinvent_scoring.scoring.score_components.rocs.rocs_similarity import RocsSimilarity
from reinvent_scoring.scoring.score_components.rocs.parallel_rocs_similarity import ParallelRocsSimilarity
| 65.666667
| 106
| 0.918782
| 23
| 197
| 7.565217
| 0.478261
| 0.137931
| 0.218391
| 0.298851
| 0.517241
| 0.517241
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0.040609
| 197
| 2
| 107
| 98.5
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9697f2fcda203ca66379c4c297f7ef69c3a26b50
| 161,504
|
py
|
Python
|
operators/ibm-block-csi-operator-community/python/pulumi_pulumi_kubernetes_crds_operators_ibm_block_csi_operator_community/csi/v1/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | null | null | null |
operators/ibm-block-csi-operator-community/python/pulumi_pulumi_kubernetes_crds_operators_ibm_block_csi_operator_community/csi/v1/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | 2
|
2020-09-18T17:12:23.000Z
|
2020-12-30T19:40:56.000Z
|
operators/ibm-block-csi-operator-community/python/pulumi_pulumi_kubernetes_crds_operators_ibm_block_csi_operator_community/csi/v1/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public surface of this generated module: one output type per object in the
# IBMBlockCSI CRD schema; names mirror the nesting of the schema fields.
__all__ = [
    'IBMBlockCSISpec',
    'IBMBlockCSISpecController',
    'IBMBlockCSISpecControllerAffinity',
    'IBMBlockCSISpecControllerAffinityNodeAffinity',
    'IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference',
    'IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions',
    'IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields',
    'IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms',
    'IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions',
    'IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields',
    'IBMBlockCSISpecControllerAffinityPodAffinity',
    'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
    'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector',
    'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions',
    'IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector',
    'IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinity',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector',
    'IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions',
    'IBMBlockCSISpecControllerTolerations',
    'IBMBlockCSISpecNode',
    'IBMBlockCSISpecNodeAffinity',
    'IBMBlockCSISpecNodeAffinityNodeAffinity',
    'IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference',
    'IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions',
    'IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields',
    'IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms',
    'IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions',
    'IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields',
    'IBMBlockCSISpecNodeAffinityPodAffinity',
    'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
    'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector',
    'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions',
    'IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector',
    'IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinity',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector',
    'IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions',
    'IBMBlockCSISpecNodeTolerations',
    'IBMBlockCSISpecSidecars',
    'IBMBlockCSIStatus',
]
@pulumi.output_type
class IBMBlockCSISpec(dict):
    """
    IBMBlockCSISpec describes the desired state of an IBMBlockCSI resource.
    """
    def __init__(__self__, *,
                 controller: 'outputs.IBMBlockCSISpecController',
                 node: 'outputs.IBMBlockCSISpecNode',
                 image_pull_secrets: Optional[Sequence[str]] = None,
                 sidecars: Optional[Sequence['outputs.IBMBlockCSISpecSidecars']] = None):
        """
        IBMBlockCSISpec describes the desired state of an IBMBlockCSI resource.

        :param 'IBMBlockCSISpecControllerArgs' controller: Desired state of the IBMBlockCSI controller.
        :param 'IBMBlockCSISpecNodeArgs' node: Desired state of the IBMBlockCSI node component.
        :param Sequence[str] image_pull_secrets: Optional; stored only when provided.
        :param Sequence['IBMBlockCSISpecSidecarsArgs'] sidecars: Optional; stored only when provided.
        """
        # Required properties are always stored; optional ones only when set.
        pulumi.set(__self__, "controller", controller)
        pulumi.set(__self__, "node", node)
        for name, value in (("image_pull_secrets", image_pull_secrets),
                            ("sidecars", sidecars)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def controller(self) -> 'outputs.IBMBlockCSISpecController':
        """Desired state of the IBMBlockCSI controller component."""
        return pulumi.get(self, "controller")

    @property
    @pulumi.getter
    def node(self) -> 'outputs.IBMBlockCSISpecNode':
        """Desired state of the IBMBlockCSI node component."""
        return pulumi.get(self, "node")

    @property
    @pulumi.getter(name="imagePullSecrets")
    def image_pull_secrets(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "image_pull_secrets")

    @property
    @pulumi.getter
    def sidecars(self) -> Optional[Sequence['outputs.IBMBlockCSISpecSidecars']]:
        return pulumi.get(self, "sidecars")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecController(dict):
    """
    IBMBlockCSIControllerSpec describes the desired state of the
    IBMBlockCSI controller.
    """
    def __init__(__self__, *,
                 repository: str,
                 tag: str,
                 affinity: Optional['outputs.IBMBlockCSISpecControllerAffinity'] = None,
                 image_pull_policy: Optional[str] = None,
                 tolerations: Optional[Sequence['outputs.IBMBlockCSISpecControllerTolerations']] = None):
        """
        IBMBlockCSIControllerSpec describes the desired state of the
        IBMBlockCSI controller.

        :param str repository: Required; stored unconditionally.
        :param str tag: Required; stored unconditionally.
        :param 'IBMBlockCSISpecControllerAffinityArgs' affinity: A group of affinity scheduling rules.
        :param str image_pull_policy: PullPolicy describing if/when to pull a container image.
        :param Sequence['IBMBlockCSISpecControllerTolerationsArgs'] tolerations: Optional; stored only when provided.
        """
        # Required properties are always stored; optional ones only when set.
        pulumi.set(__self__, "repository", repository)
        pulumi.set(__self__, "tag", tag)
        for name, value in (("affinity", affinity),
                            ("image_pull_policy", image_pull_policy),
                            ("tolerations", tolerations)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def repository(self) -> str:
        return pulumi.get(self, "repository")

    @property
    @pulumi.getter
    def tag(self) -> str:
        return pulumi.get(self, "tag")

    @property
    @pulumi.getter
    def affinity(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinity']:
        """A group of affinity scheduling rules."""
        return pulumi.get(self, "affinity")

    @property
    @pulumi.getter(name="imagePullPolicy")
    def image_pull_policy(self) -> Optional[str]:
        """PullPolicy describing if/when to pull a container image."""
        return pulumi.get(self, "image_pull_policy")

    @property
    @pulumi.getter
    def tolerations(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerTolerations']]:
        return pulumi.get(self, "tolerations")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinity(dict):
    """
    A group of affinity scheduling rules.
    """
    def __init__(__self__, *,
                 node_affinity: Optional['outputs.IBMBlockCSISpecControllerAffinityNodeAffinity'] = None,
                 pod_affinity: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinity'] = None,
                 pod_anti_affinity: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinity'] = None):
        """
        A group of affinity scheduling rules.

        :param 'IBMBlockCSISpecControllerAffinityNodeAffinityArgs' node_affinity: Node affinity scheduling rules for the pod.
        :param 'IBMBlockCSISpecControllerAffinityPodAffinityArgs' pod_affinity: Pod affinity rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)).
        :param 'IBMBlockCSISpecControllerAffinityPodAntiAffinityArgs' pod_anti_affinity: Pod anti-affinity rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)).
        """
        # Every property is optional; store only the ones that were provided.
        for name, value in (("node_affinity", node_affinity),
                            ("pod_affinity", pod_affinity),
                            ("pod_anti_affinity", pod_anti_affinity)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="nodeAffinity")
    def node_affinity(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityNodeAffinity']:
        """Node affinity scheduling rules for the pod."""
        return pulumi.get(self, "node_affinity")

    @property
    @pulumi.getter(name="podAffinity")
    def pod_affinity(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinity']:
        """Pod affinity rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s))."""
        return pulumi.get(self, "pod_affinity")

    @property
    @pulumi.getter(name="podAntiAffinity")
    def pod_anti_affinity(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinity']:
        """Pod anti-affinity rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s))."""
        return pulumi.get(self, "pod_anti_affinity")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinity(dict):
    """
    Node affinity scheduling rules for the pod.
    """
    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution'] = None):
        """
        Node affinity scheduling rules for the pod.

        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution: Soft preference: the scheduler favors nodes matching these weighted terms but may still pick a node that violates them. A node's score is the sum of the weights of the terms it matches (alongside the other scheduling requirements), and the highest-scoring node(s) are preferred.
        :param 'IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs' required_during_scheduling_ignored_during_execution: Hard requirement: if not met at scheduling time the pod is not scheduled onto the node; if it ceases to be met during execution (e.g. due to an update), the system may or may not try to eventually evict the pod.
        """
        # Both properties are optional; store only the ones that were provided.
        for name, value in (
                ("preferred_during_scheduling_ignored_during_execution",
                 preferred_during_scheduling_ignored_during_execution),
                ("required_during_scheduling_ignored_during_execution",
                 required_during_scheduling_ignored_during_execution)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """
        Soft preference: the scheduler favors nodes matching these weighted
        terms but may still pick a node that violates them. A node's score is
        the sum of the weights of the terms it matches, and the
        highest-scoring node(s) are preferred.
        """
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution']:
        """
        Hard requirement: if not met at scheduling time the pod is not
        scheduled onto the node; if it ceases to be met during execution
        (e.g. due to an update), the system may or may not try to eventually
        evict the pod.
        """
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A weighted preferred scheduling term. An empty term matches all objects
    with implicit weight 0 (a no-op); a null term matches no objects (also a
    no-op).
    """
    def __init__(__self__, *,
                 preference: 'outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference',
                 weight: int):
        """
        A weighted preferred scheduling term.

        :param 'IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceArgs' preference: Node selector term associated with the weight.
        :param int weight: Weight applied when the node selector term matches, in the range 1-100.
        """
        # Both properties are required and stored unconditionally.
        for name, value in (("preference", preference), ("weight", weight)):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def preference(self) -> 'outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference':
        """Node selector term associated with the weight."""
        return pulumi.get(self, "preference")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """Weight applied when the node selector term matches, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference(dict):
    """
    The node selector term associated with a scheduling weight.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions']] = None,
                 match_fields: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields']] = None):
        """
        The node selector term associated with a scheduling weight.

        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressionsArgs'] match_expressions: Node selector requirements by node's labels.
        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFieldsArgs'] match_fields: Node selector requirements by node's fields.
        """
        # Both properties are optional; store only the ones that were provided.
        for name, value in (("match_expressions", match_expressions),
                            ("match_fields", match_fields)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions']]:
        """Node selector requirements by node's labels."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchFields")
    def match_fields(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields']]:
        """Node selector requirements by node's fields."""
        return pulumi.get(self, "match_fields")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions(dict):
    """
    A single node selector requirement: a key, an operator relating the key
    to a set of values, and (optionally) the values themselves.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A single node selector requirement.

        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and exactly one integer-interpreted element for Gt/Lt. Replaced during a strategic merge patch.
        """
        # key and operator are required; values is stored only when provided.
        for name, value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, name, value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        String values for the operator: non-empty for In/NotIn, empty for
        Exists/DoesNotExist, and exactly one integer-interpreted element for
        Gt/Lt. Replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields(dict):
    """
    A single node selector requirement: a key, an operator relating the key
    to a set of values, and (optionally) the values themselves.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A single node selector requirement.

        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and exactly one integer-interpreted element for Gt/Lt. Replaced during a strategic merge patch.
        """
        # key and operator are required; values is stored only when provided.
        for name, value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, name, value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        String values for the operator: non-empty for In/NotIn, empty for
        Exists/DoesNotExist, and exactly one integer-interpreted element for
        Gt/Lt. Replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """
    Hard node-affinity requirement: if not met at scheduling time the pod is
    not scheduled onto the node; if it ceases to be met during execution
    (e.g. due to an update), the system may or may not try to eventually
    evict the pod.
    """
    def __init__(__self__, *,
                 node_selector_terms: Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms']):
        """
        Hard node-affinity requirement.

        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsArgs'] node_selector_terms: Required. A list of node selector terms. The terms are ORed.
        """
        # The single property is required and stored unconditionally.
        pulumi.set(__self__, "node_selector_terms", node_selector_terms)

    @property
    @pulumi.getter(name="nodeSelectorTerms")
    def node_selector_terms(self) -> Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms']:
        """Required. A list of node selector terms. The terms are ORed."""
        return pulumi.get(self, "node_selector_terms")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms(dict):
    """
    A node selector term. A null or empty term matches no objects; its
    requirements are ANDed. The TopologySelectorTerm type implements a subset
    of the NodeSelectorTerm.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions']] = None,
                 match_fields: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields']] = None):
        """
        A node selector term whose requirements are ANDed.

        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressionsArgs'] match_expressions: Node selector requirements by node's labels.
        :param Sequence['IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFieldsArgs'] match_fields: Node selector requirements by node's fields.
        """
        # Both properties are optional; store only the ones that were provided.
        for name, value in (("match_expressions", match_expressions),
                            ("match_fields", match_fields)):
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions']]:
        """Node selector requirements by node's labels."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchFields")
    def match_fields(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields']]:
        """Node selector requirements by node's fields."""
        return pulumi.get(self, "match_fields")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions(dict):
    """
    A single node selector requirement: a key, an operator relating the key
    to a set of values, and (optionally) the values themselves.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A single node selector requirement.

        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and exactly one integer-interpreted element for Gt/Lt. Replaced during a strategic merge patch.
        """
        # key and operator are required; values is stored only when provided.
        for name, value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, name, value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        String values for the operator: non-empty for In/NotIn, empty for
        Exists/DoesNotExist, and exactly one integer-interpreted element for
        Gt/Lt. Replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields(dict):
    """
    A single node selector requirement: a key, an operator relating the key
    to a set of values, and (optionally) the values themselves.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A single node selector requirement.

        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and exactly one integer-interpreted element for Gt/Lt. Replaced during a strategic merge patch.
        """
        # key and operator are required; values is stored only when provided.
        for name, value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, name, value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values. Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        String values for the operator: non-empty for In/NotIn, empty for
        Exists/DoesNotExist, and exactly one integer-interpreted element for
        Gt/Lt. Replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case attribute name.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinity(dict):
    """Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s))."""

    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution']] = None):
        """
        Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)).
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution: The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs'] required_during_scheduling_ignored_during_execution: If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.
        """
        # Both properties are optional; record only those that were supplied.
        for prop_name, prop_value in (
                ("preferred_during_scheduling_ignored_during_execution", preferred_during_scheduling_ignored_during_execution),
                ("required_during_scheduling_ignored_during_execution", required_during_scheduling_ignored_during_execution)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred."""
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution']]:
        """If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied."""
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s)"""

    def __init__(__self__, *,
                 pod_affinity_term: 'outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
                 weight: int):
        """
        The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s)
        :param 'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermArgs' pod_affinity_term: Required. A pod affinity term, associated with the corresponding weight.
        :param int weight: weight associated with matching the corresponding podAffinityTerm, in the range 1-100.
        """
        # Both properties are required and always recorded.
        for prop_name, prop_value in (("pod_affinity_term", pod_affinity_term),
                                      ("weight", weight)):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="podAffinityTerm")
    def pod_affinity_term(self) -> 'outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm':
        """Required. A pod affinity term, associated with the corresponding weight."""
        return pulumi.get(self, "pod_affinity_term")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """weight associated with matching the corresponding podAffinityTerm, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm(dict):
    """Required. A pod affinity term, associated with the corresponding weight."""

    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        Required. A pod affinity term, associated with the corresponding weight.
        :param str topology_key: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorArgs' label_selector: A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace"
        """
        # "topology_key" is required; the other properties are recorded
        # only when supplied.
        pulumi.set(__self__, "topology_key", topology_key)
        for prop_name, prop_value in (("label_selector", label_selector),
                                      ("namespaces", namespaces)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace\""""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector(dict):
    """A label query over a set of resources, in this case pods."""

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        A label query over a set of resources, in this case pods.
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressionsArgs'] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
        :param Mapping[str, str] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
        """
        # Both properties are optional; record only those that were supplied.
        for prop_name, prop_value in (("match_expressions", match_expressions),
                                      ("match_labels", match_labels)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']]:
        """matchExpressions is a list of label selector requirements. The requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions(dict):
    """A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values."""

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
        :param str key: key is the label key that the selector applies to.
        :param str operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        # Required properties are always recorded; "values" only when supplied.
        for prop_name, prop_value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, prop_name, prop_value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """key is the label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key <topologyKey> matches that of any node on which a pod of the set of pods is running"""

    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key <topologyKey> matches that of any node on which a pod of the set of pods is running
        :param str topology_key: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorArgs' label_selector: A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace"
        """
        # "topology_key" is required; the other properties are recorded
        # only when supplied.
        pulumi.set(__self__, "topology_key", topology_key)
        for prop_name, prop_value in (("label_selector", label_selector),
                                      ("namespaces", namespaces)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace\""""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector(dict):
    """A label query over a set of resources, in this case pods."""

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        A label query over a set of resources, in this case pods.
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressionsArgs'] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
        :param Mapping[str, str] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
        """
        # Both properties are optional; record only those that were supplied.
        for prop_name, prop_value in (("match_expressions", match_expressions),
                                      ("match_labels", match_labels)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']]:
        """matchExpressions is a list of label selector requirements. The requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions(dict):
    """A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values."""

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
        :param str key: key is the label key that the selector applies to.
        :param str operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        # Required properties are always recorded; "values" only when supplied.
        for prop_name, prop_value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, prop_name, prop_value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """key is the label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinity(dict):
    """Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s))."""

    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution']] = None):
        """
        Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)).
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution: The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred.
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs'] required_during_scheduling_ignored_during_execution: If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied.
        """
        # Both properties are optional; record only those that were supplied.
        for prop_name, prop_value in (
                ("preferred_during_scheduling_ignored_during_execution", preferred_during_scheduling_ignored_during_execution),
                ("required_during_scheduling_ignored_during_execution", required_during_scheduling_ignored_during_execution)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, requiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the node(s) with the highest sum are the most preferred."""
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution']]:
        """If the anti-affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the anti-affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each podAffinityTerm are intersected, i.e. all terms must be satisfied."""
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s)"""

    def __init__(__self__, *,
                 pod_affinity_term: 'outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
                 weight: int):
        """
        The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s)
        :param 'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermArgs' pod_affinity_term: Required. A pod affinity term, associated with the corresponding weight.
        :param int weight: weight associated with matching the corresponding podAffinityTerm, in the range 1-100.
        """
        # Both properties are required and always recorded.
        for prop_name, prop_value in (("pod_affinity_term", pod_affinity_term),
                                      ("weight", weight)):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="podAffinityTerm")
    def pod_affinity_term(self) -> 'outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm':
        """Required. A pod affinity term, associated with the corresponding weight."""
        return pulumi.get(self, "pod_affinity_term")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """weight associated with matching the corresponding podAffinityTerm, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm(dict):
    """Required. A pod affinity term, associated with the corresponding weight."""

    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        Required. A pod affinity term, associated with the corresponding weight.
        :param str topology_key: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorArgs' label_selector: A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace"
        """
        # "topology_key" is required; the other properties are recorded
        # only when supplied.
        pulumi.set(__self__, "topology_key", topology_key)
        for prop_name, prop_value in (("label_selector", label_selector),
                                      ("namespaces", namespaces)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace\""""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector(dict):
    """A label query over a set of resources, in this case pods."""

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        A label query over a set of resources, in this case pods.
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressionsArgs'] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
        :param Mapping[str, str] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
        """
        # Both properties are optional; record only those that were supplied.
        for prop_name, prop_value in (("match_expressions", match_expressions),
                                      ("match_labels", match_labels)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']]:
        """matchExpressions is a list of label selector requirements. The requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions(dict):
    """A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values."""

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
        :param str key: key is the label key that the selector applies to.
        :param str operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        # Required properties are always recorded; "values" only when supplied.
        for prop_name, prop_value in (("key", key), ("operator", operator)):
            pulumi.set(__self__, prop_name, prop_value)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """key is the label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Fall back to the untranslated name when no mapping exists.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A pod-affinity term: the set of pods (those matching labelSelector in the
    given namespace(s)) that this pod should be co-located with (affinity) or
    kept apart from (anti-affinity), where co-location means running on a node
    whose value for the label named by topologyKey matches that of a node
    running any pod of the set.
    """

    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        :param str topology_key: Node-label key used to decide co-location; an empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorArgs' label_selector: A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: Namespaces the labelSelector matches against; null or an empty list means "this pod's namespace".
        """
        pulumi.set(__self__, "topology_key", topology_key)
        # Optional facets are recorded only when supplied.
        for attr, val in (("label_selector", label_selector), ("namespaces", namespaces)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """Node-label key used to decide co-location; an empty topologyKey is not allowed."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """Namespaces the labelSelector matches against; null/empty means this pod's namespace."""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector(dict):
    """
    A label query over a set of resources, in this case pods.
    """

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        :param Sequence['IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressionsArgs'] match_expressions: Label-selector requirements; all of them must hold (ANDed).
        :param Mapping[str, str] match_labels: Exact-match {key: value} pairs; each is shorthand for an "In" requirement with that single value, ANDed with the rest.
        """
        # Only the selector facets the caller actually supplied are stored.
        for attr, val in (("match_expressions", match_expressions), ("match_labels", match_labels)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']]:
        """Label-selector requirements; all of them must hold (ANDed)."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """Exact-match {key: value} pairs, each shorthand for a single-value "In" requirement; ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecControllerAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions(dict):
    """
    One label-selector requirement: a key, an operator, and (optionally) the
    values that the operator relates the key to.
    """

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key this requirement applies to.
        :param str operator: How the key relates to the values; valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist. Replaced wholesale during a strategic merge patch.
        """
        # Required fields are always stored.
        for attr, val in (("key", key), ("operator", operator)):
            pulumi.set(__self__, attr, val)
        # The optional field is stored only when the caller supplied it.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key this requirement applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """How the key relates to the values (In, NotIn, Exists or DoesNotExist)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the operator, when any were provided."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecControllerTolerations(dict):
    """
    A toleration: the pod it is attached to tolerates any taint matching the
    <key,value,effect> triple under the matching operator <operator>.
    """

    def __init__(__self__, *,
                 effect: Optional[str] = None,
                 key: Optional[str] = None,
                 operator: Optional[str] = None,
                 toleration_seconds: Optional[int] = None,
                 value: Optional[str] = None):
        """
        :param str effect: Taint effect to match; empty matches all effects. When set, allowed values are NoSchedule, PreferNoSchedule and NoExecute.
        :param str key: Taint key the toleration applies to; empty matches all keys (and then operator must be Exists, matching all values and keys).
        :param str operator: Relationship of key to value: Exists or Equal (default Equal). Exists acts as a wildcard for value.
        :param int toleration_seconds: How long (seconds) a NoExecute taint is tolerated; ignored for other effects. Unset means tolerate forever; zero/negative is treated as 0 (evict immediately).
        :param str value: Taint value to match; should be empty when operator is Exists, otherwise a regular string.
        """
        # Every field is optional on the wire; store only what was supplied.
        supplied = (
            ("effect", effect),
            ("key", key),
            ("operator", operator),
            ("toleration_seconds", toleration_seconds),
            ("value", value),
        )
        for attr, val in supplied:
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def effect(self) -> Optional[str]:
        """Taint effect to match (NoSchedule, PreferNoSchedule or NoExecute); empty matches all."""
        return pulumi.get(self, "effect")

    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """Taint key the toleration applies to; empty matches all keys (operator must then be Exists)."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> Optional[str]:
        """Relationship of key to value: Exists or Equal (default Equal); Exists wildcards the value."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter(name="tolerationSeconds")
    def toleration_seconds(self) -> Optional[int]:
        """Seconds a NoExecute taint is tolerated; unset = forever, zero/negative = evict immediately."""
        return pulumi.get(self, "toleration_seconds")

    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """Taint value to match; empty when operator is Exists, otherwise a regular string."""
        return pulumi.get(self, "value")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNode(dict):
    """
    IBMBlockCSINodeSpec defines the desired state of IBMBlockCSINode.
    """

    def __init__(__self__, *,
                 repository: str,
                 tag: str,
                 affinity: Optional['outputs.IBMBlockCSISpecNodeAffinity'] = None,
                 image_pull_policy: Optional[str] = None,
                 tolerations: Optional[Sequence['outputs.IBMBlockCSISpecNodeTolerations']] = None):
        """
        :param str repository: NOTE(review): presumably the container image repository — the schema gives no description.
        :param str tag: NOTE(review): presumably the container image tag — the schema gives no description.
        :param 'IBMBlockCSISpecNodeAffinityArgs' affinity: Affinity is a group of affinity scheduling rules.
        :param str image_pull_policy: PullPolicy describes a policy for if/when to pull a container image.
        """
        # Required fields first, then whichever optional fields were supplied.
        for attr, val in (("repository", repository), ("tag", tag)):
            pulumi.set(__self__, attr, val)
        optional = (
            ("affinity", affinity),
            ("image_pull_policy", image_pull_policy),
            ("tolerations", tolerations),
        )
        for attr, val in optional:
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def repository(self) -> str:
        # NOTE(review): presumably the container image repository — confirm against the CRD.
        return pulumi.get(self, "repository")

    @property
    @pulumi.getter
    def tag(self) -> str:
        # NOTE(review): presumably the container image tag — confirm against the CRD.
        return pulumi.get(self, "tag")

    @property
    @pulumi.getter
    def affinity(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinity']:
        """Affinity is a group of affinity scheduling rules."""
        return pulumi.get(self, "affinity")

    @property
    @pulumi.getter(name="imagePullPolicy")
    def image_pull_policy(self) -> Optional[str]:
        """PullPolicy describes a policy for if/when to pull a container image."""
        return pulumi.get(self, "image_pull_policy")

    @property
    @pulumi.getter
    def tolerations(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeTolerations']]:
        return pulumi.get(self, "tolerations")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinity(dict):
    """
    Affinity is a group of affinity scheduling rules.
    """

    def __init__(__self__, *,
                 node_affinity: Optional['outputs.IBMBlockCSISpecNodeAffinityNodeAffinity'] = None,
                 pod_affinity: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinity'] = None,
                 pod_anti_affinity: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinity'] = None):
        """
        :param 'IBMBlockCSISpecNodeAffinityNodeAffinityArgs' node_affinity: Node affinity scheduling rules for the pod.
        :param 'IBMBlockCSISpecNodeAffinityPodAffinityArgs' pod_affinity: Pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)).
        :param 'IBMBlockCSISpecNodeAffinityPodAntiAffinityArgs' pod_anti_affinity: Pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)).
        """
        # All three rule groups are optional; store only the ones supplied.
        groups = (
            ("node_affinity", node_affinity),
            ("pod_affinity", pod_affinity),
            ("pod_anti_affinity", pod_anti_affinity),
        )
        for attr, val in groups:
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="nodeAffinity")
    def node_affinity(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityNodeAffinity']:
        """Node affinity scheduling rules for the pod."""
        return pulumi.get(self, "node_affinity")

    @property
    @pulumi.getter(name="podAffinity")
    def pod_affinity(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinity']:
        """Pod affinity scheduling rules (co-locate with other pods)."""
        return pulumi.get(self, "pod_affinity")

    @property
    @pulumi.getter(name="podAntiAffinity")
    def pod_anti_affinity(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinity']:
        """Pod anti-affinity scheduling rules (keep away from other pods)."""
        return pulumi.get(self, "pod_anti_affinity")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinity(dict):
    """
    Describes node affinity scheduling rules for the pod.
    """

    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution'] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution: Soft preference: the scheduler prefers nodes satisfying these weighted terms, summing each term's "weight" for nodes matching its expressions; the node(s) with the highest sum are most preferred, but a violating node may still be chosen.
        :param 'IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs' required_during_scheduling_ignored_during_execution: Hard requirement at scheduling time; if it later stops holding during execution, the system may or may not evict the pod.
        """
        # Store whichever of the two rule sets were supplied.
        rules = (
            ("preferred_during_scheduling_ignored_during_execution", preferred_during_scheduling_ignored_during_execution),
            ("required_during_scheduling_ignored_during_execution", required_during_scheduling_ignored_during_execution),
        )
        for attr, val in rules:
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """Soft, weighted node preferences; nodes with the greatest weight sum are most preferred."""
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution']:
        """Hard requirement at scheduling time; eviction on later violation is not guaranteed."""
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A weighted preferred-scheduling term. An empty term matches all objects
    with implicit weight 0 (a no-op); a null term matches no objects (also a
    no-op).
    """

    def __init__(__self__, *,
                 preference: 'outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference',
                 weight: int):
        """
        :param 'IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceArgs' preference: Node selector term associated with the weight.
        :param int weight: Weight for matching the node selector term, in the range 1-100.
        """
        # Both fields are required; store them unconditionally.
        for attr, val in (("preference", preference), ("weight", weight)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def preference(self) -> 'outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference':
        """Node selector term associated with the weight."""
        return pulumi.get(self, "preference")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """Weight for matching the node selector term, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreference(dict):
    """
    A node selector term, associated with the corresponding weight.
    """

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions']] = None,
                 match_fields: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields']] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressionsArgs'] match_expressions: Node selector requirements by node's labels.
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFieldsArgs'] match_fields: Node selector requirements by node's fields.
        """
        # Store only the requirement lists the caller supplied.
        for attr, val in (("match_expressions", match_expressions), ("match_fields", match_fields)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions']]:
        """Node selector requirements by node's labels."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchFields")
    def match_fields(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields']]:
        """Node selector requirements by node's fields."""
        return pulumi.get(self, "match_fields")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchExpressions(dict):
    """
    One node-selector requirement: a key, an operator, and (optionally) the
    values that the operator relates the key to.
    """

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key this requirement applies to.
        :param str operator: How the key relates to the values; valid operators are In, NotIn, Exists, DoesNotExist, Gt and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and a single element (interpreted as an integer) for Gt/Lt. Replaced wholesale during a strategic merge patch.
        """
        # Required fields are always stored.
        for attr, val in (("key", key), ("operator", operator)):
            pulumi.set(__self__, attr, val)
        # The optional field is stored only when the caller supplied it.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key this requirement applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """How the key relates to the values (In, NotIn, Exists, DoesNotExist, Gt or Lt)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the operator, when any were provided."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityPreferredDuringSchedulingIgnoredDuringExecutionPreferenceMatchFields(dict):
    """
    One node-selector requirement: a key, an operator, and (optionally) the
    values that the operator relates the key to.
    """

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key this requirement applies to.
        :param str operator: How the key relates to the values; valid operators are In, NotIn, Exists, DoesNotExist, Gt and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and a single element (interpreted as an integer) for Gt/Lt. Replaced wholesale during a strategic merge patch.
        """
        # Required fields are always stored.
        for attr, val in (("key", key), ("operator", operator)):
            pulumi.set(__self__, attr, val)
        # The optional field is stored only when the caller supplied it.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key this requirement applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """How the key relates to the values (In, NotIn, Exists, DoesNotExist, Gt or Lt)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the operator, when any were provided."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """
    Hard node-affinity requirement: if not met at scheduling time the pod is
    not scheduled onto the node; if it stops holding during execution (e.g.
    after an update), the system may or may not try to evict the pod.
    """

    def __init__(__self__, *,
                 node_selector_terms: Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms']):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsArgs'] node_selector_terms: Required. A list of node selector terms. The terms are ORed.
        """
        # The only field is required; store it unconditionally.
        pulumi.set(__self__, "node_selector_terms", node_selector_terms)

    @property
    @pulumi.getter(name="nodeSelectorTerms")
    def node_selector_terms(self) -> Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms']:
        """Required list of node selector terms; the terms are ORed."""
        terms = pulumi.get(self, "node_selector_terms")
        return terms

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTerms(dict):
    """
    A node selector term. A null or empty term matches no objects; its
    requirements are ANDed. The TopologySelectorTerm type implements a subset
    of the NodeSelectorTerm.
    """

    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions']] = None,
                 match_fields: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields']] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressionsArgs'] match_expressions: Node selector requirements by node's labels.
        :param Sequence['IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFieldsArgs'] match_fields: Node selector requirements by node's fields.
        """
        # Store only the requirement lists the caller supplied.
        for attr, val in (("match_expressions", match_expressions), ("match_fields", match_fields)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions']]:
        """Node selector requirements by node's labels."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchFields")
    def match_fields(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields']]:
        """Node selector requirements by node's fields."""
        return pulumi.get(self, "match_fields")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchExpressions(dict):
    """
    One node-selector requirement: a key, an operator, and (optionally) the
    values that the operator relates the key to.
    """

    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key this requirement applies to.
        :param str operator: How the key relates to the values; valid operators are In, NotIn, Exists, DoesNotExist, Gt and Lt.
        :param Sequence[str] values: String values for the operator: non-empty for In/NotIn, empty for Exists/DoesNotExist, and a single element (interpreted as an integer) for Gt/Lt. Replaced wholesale during a strategic merge patch.
        """
        # Required fields are always stored.
        for attr, val in (("key", key), ("operator", operator)):
            pulumi.set(__self__, attr, val)
        # The optional field is stored only when the caller supplied it.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key this requirement applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """How the key relates to the values (In, NotIn, Exists, DoesNotExist, Gt or Lt)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the operator, when any were provided."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityNodeAffinityRequiredDuringSchedulingIgnoredDuringExecutionNodeSelectorTermsMatchFields(dict):
    """
    A node selector requirement: a field key, an operator, and an optional
    set of values that relate the key to those values.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid
               operators are In, NotIn, Exists, DoesNotExist, Gt, and Lt.
        :param Sequence[str] values: String values for the comparison: must be
               non-empty for In/NotIn, empty for Exists/DoesNotExist, and a
               single integer-like element for Gt/Lt. Replaced during a
               strategic merge patch.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        # "values" is optional; store it only when the caller supplied one.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values (In, NotIn, Exists, DoesNotExist, Gt, Lt)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the comparison; constraints depend on the operator."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinity(dict):
    """
    Pod affinity scheduling rules (e.g. co-locate this pod in the same node,
    zone, etc. as some other pod(s)).
    """
    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution']] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution:
               Soft affinity: the scheduler prefers nodes satisfying these
               terms but may pick a node that violates some. For each
               candidate node it sums the "weight" of every term whose
               podAffinityTerm matches pods on that node; the node(s) with
               the highest sum are most preferred.
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs'] required_during_scheduling_ignored_during_execution:
               Hard affinity: if unmet at scheduling time the pod is not
               scheduled; if it ceases to be met during execution (e.g. a
               pod label update) the system may or may not evict the pod.
               With multiple terms, the node lists per podAffinityTerm are
               intersected — all terms must be satisfied.
        """
        # Both members are optional; store only what the caller provided.
        if preferred_during_scheduling_ignored_during_execution is not None:
            pulumi.set(__self__, "preferred_during_scheduling_ignored_during_execution", preferred_during_scheduling_ignored_during_execution)
        if required_during_scheduling_ignored_during_execution is not None:
            pulumi.set(__self__, "required_during_scheduling_ignored_during_execution", required_during_scheduling_ignored_during_execution)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """Soft (weighted) affinity terms; see ``__init__`` for semantics."""
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution']]:
        """Hard affinity terms; all must be satisfied at scheduling time."""
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A weighted pod affinity term; the weights of all matched
    WeightedPodAffinityTerm fields are added per-node to find the most
    preferred node(s).
    """
    def __init__(__self__, *,
                 pod_affinity_term: 'outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
                 weight: int):
        """
        :param 'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermArgs' pod_affinity_term:
               Required. The pod affinity term associated with this weight.
        :param int weight: Weight for matching the term, in the range 1-100.
        """
        # Both fields are mandatory, so they are stored unconditionally.
        pulumi.set(__self__, "pod_affinity_term", pod_affinity_term)
        pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter(name="podAffinityTerm")
    def pod_affinity_term(self) -> 'outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm':
        """Required. The pod affinity term associated with this weight."""
        return pulumi.get(self, "pod_affinity_term")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """Weight for matching the term, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm(dict):
    """
    A pod affinity term: selects the pods (via labelSelector over the given
    namespaces) this pod should be co-located with, where co-location means
    running on a node whose value for the topologyKey label matches.
    """
    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        :param str topology_key: Node label key defining co-location: the pod
               runs (affinity) or does not run (anti-affinity) on nodes whose
               value for this label matches that of a node running any of the
               selected pods. An empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorArgs' label_selector:
               A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: Namespaces the labelSelector matches
               against; null or an empty list means "this pod's namespace".
        """
        pulumi.set(__self__, "topology_key", topology_key)
        # Optional members are only stored when explicitly supplied.
        if label_selector is not None:
            pulumi.set(__self__, "label_selector", label_selector)
        if namespaces is not None:
            pulumi.set(__self__, "namespaces", namespaces)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """Node label key defining co-location; must not be empty."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """Namespaces the labelSelector applies to; null/empty means this pod's namespace."""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector(dict):
    """
    A label query over a set of resources, in this case pods.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressionsArgs'] match_expressions:
               Label selector requirements; all requirements are ANDed.
        :param Mapping[str, str] match_labels: Map of {key,value} pairs; each
               pair is equivalent to a matchExpressions element whose operator
               is "In" with that single value. All pairs are ANDed.
        """
        # Both members are optional; store only what the caller provided.
        if match_expressions is not None:
            pulumi.set(__self__, "match_expressions", match_expressions)
        if match_labels is not None:
            pulumi.set(__self__, "match_labels", match_labels)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']]:
        """Label selector requirements; all requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """Map of {key,value} pairs, each an implicit "In" requirement; all ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions(dict):
    """
    A label selector requirement: a key, an operator, and an optional set of
    values that relate the key to those values.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid
               operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: String values for the comparison: must be
               non-empty for In/NotIn and empty for Exists/DoesNotExist.
               Replaced during a strategic merge patch.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        # "values" is optional; store it only when the caller supplied one.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values (In, NotIn, Exists, DoesNotExist)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the comparison; constraints depend on the operator."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A pod affinity term: selects the pods (via labelSelector over the given
    namespaces) this pod should be co-located with (affinity) or not
    (anti-affinity), where co-location means running on a node whose value
    for the <topologyKey> label matches a node already running such a pod.
    """
    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        :param str topology_key: Node label key defining co-location: the pod
               runs (affinity) or does not run (anti-affinity) on nodes whose
               value for this label matches that of a node running any of the
               selected pods. An empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorArgs' label_selector:
               A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: Namespaces the labelSelector matches
               against; null or an empty list means "this pod's namespace".
        """
        pulumi.set(__self__, "topology_key", topology_key)
        # Optional members are only stored when explicitly supplied.
        if label_selector is not None:
            pulumi.set(__self__, "label_selector", label_selector)
        if namespaces is not None:
            pulumi.set(__self__, "namespaces", namespaces)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """Node label key defining co-location; must not be empty."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """Namespaces the labelSelector applies to; null/empty means this pod's namespace."""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector(dict):
    """
    A label query over a set of resources, in this case pods.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressionsArgs'] match_expressions:
               Label selector requirements; all requirements are ANDed.
        :param Mapping[str, str] match_labels: Map of {key,value} pairs; each
               pair is equivalent to a matchExpressions element whose operator
               is "In" with that single value. All pairs are ANDed.
        """
        # Both members are optional; store only what the caller provided.
        if match_expressions is not None:
            pulumi.set(__self__, "match_expressions", match_expressions)
        if match_labels is not None:
            pulumi.set(__self__, "match_labels", match_labels)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']]:
        """Label selector requirements; all requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """Map of {key,value} pairs, each an implicit "In" requirement; all ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions(dict):
    """
    A label selector requirement: a key, an operator, and an optional set of
    values that relate the key to those values.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The label key that the selector applies to.
        :param str operator: The key's relationship to the values. Valid
               operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: String values for the comparison: must be
               non-empty for In/NotIn and empty for Exists/DoesNotExist.
               Replaced during a strategic merge patch.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        # "values" is optional; store it only when the caller supplied one.
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """The label key that the selector applies to."""
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """The key's relationship to the values (In, NotIn, Exists, DoesNotExist)."""
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """String values for the comparison; constraints depend on the operator."""
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinity(dict):
    """
    Pod anti-affinity scheduling rules (e.g. avoid putting this pod in the
    same node, zone, etc. as some other pod(s)).
    """
    def __init__(__self__, *,
                 preferred_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution']] = None,
                 required_during_scheduling_ignored_during_execution: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution']] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionArgs'] preferred_during_scheduling_ignored_during_execution:
               Soft anti-affinity: the scheduler prefers nodes satisfying
               these terms but may pick a node that violates some. For each
               candidate node it sums the "weight" of every term whose
               podAffinityTerm matches pods on that node; the node(s) with
               the highest sum are most preferred.
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionArgs'] required_during_scheduling_ignored_during_execution:
               Hard anti-affinity: if unmet at scheduling time the pod is not
               scheduled; if it ceases to be met during execution (e.g. a
               pod label update) the system may or may not evict the pod.
               With multiple terms, the node lists per podAffinityTerm are
               intersected — all terms must be satisfied.
        """
        # Both members are optional; store only what the caller provided.
        if preferred_during_scheduling_ignored_during_execution is not None:
            pulumi.set(__self__, "preferred_during_scheduling_ignored_during_execution", preferred_during_scheduling_ignored_during_execution)
        if required_during_scheduling_ignored_during_execution is not None:
            pulumi.set(__self__, "required_during_scheduling_ignored_during_execution", required_during_scheduling_ignored_during_execution)

    @property
    @pulumi.getter(name="preferredDuringSchedulingIgnoredDuringExecution")
    def preferred_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution']]:
        """Soft (weighted) anti-affinity terms; see ``__init__`` for semantics."""
        return pulumi.get(self, "preferred_during_scheduling_ignored_during_execution")

    @property
    @pulumi.getter(name="requiredDuringSchedulingIgnoredDuringExecution")
    def required_during_scheduling_ignored_during_execution(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution']]:
        """Hard anti-affinity terms; all must be satisfied at scheduling time."""
        return pulumi.get(self, "required_during_scheduling_ignored_during_execution")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecution(dict):
    """
    A weighted pod affinity term; the weights of all matched
    WeightedPodAffinityTerm fields are added per-node to find the most
    preferred node(s).
    """
    def __init__(__self__, *,
                 pod_affinity_term: 'outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm',
                 weight: int):
        """
        :param 'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermArgs' pod_affinity_term:
               Required. The pod affinity term associated with this weight.
        :param int weight: Weight for matching the term, in the range 1-100.
        """
        # Both fields are mandatory, so they are stored unconditionally.
        pulumi.set(__self__, "pod_affinity_term", pod_affinity_term)
        pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter(name="podAffinityTerm")
    def pod_affinity_term(self) -> 'outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm':
        """Required. The pod affinity term associated with this weight."""
        return pulumi.get(self, "pod_affinity_term")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """Weight for matching the term, in the range 1-100."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTerm(dict):
    """
    A pod affinity term: selects the pods (via labelSelector over the given
    namespaces) this pod should be kept apart from (anti-affinity), where
    co-location means running on a node whose value for the topologyKey
    label matches.
    """
    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        :param str topology_key: Node label key defining co-location: the pod
               runs (affinity) or does not run (anti-affinity) on nodes whose
               value for this label matches that of a node running any of the
               selected pods. An empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorArgs' label_selector:
               A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: Namespaces the labelSelector matches
               against; null or an empty list means "this pod's namespace".
        """
        pulumi.set(__self__, "topology_key", topology_key)
        # Optional members are only stored when explicitly supplied.
        if label_selector is not None:
            pulumi.set(__self__, "label_selector", label_selector)
        if namespaces is not None:
            pulumi.set(__self__, "namespaces", namespaces)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """Node label key defining co-location; must not be empty."""
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector']:
        """A label query over a set of resources, in this case pods."""
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """Namespaces the labelSelector applies to; null/empty means this pod's namespace."""
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelector(dict):
    """
    A label query over a set of resources, in this case pods.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressionsArgs'] match_expressions:
               Label selector requirements; all requirements are ANDed.
        :param Mapping[str, str] match_labels: Map of {key,value} pairs; each
               pair is equivalent to a matchExpressions element whose operator
               is "In" with that single value. All pairs are ANDed.
        """
        # Both members are optional; store only what the caller provided.
        if match_expressions is not None:
            pulumi.set(__self__, "match_expressions", match_expressions)
        if match_labels is not None:
            pulumi.set(__self__, "match_labels", match_labels)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions']]:
        """Label selector requirements; all requirements are ANDed."""
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """Map of {key,value} pairs, each an implicit "In" requirement; all ANDed."""
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python name, if a mapping exists.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake if snake else prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityPreferredDuringSchedulingIgnoredDuringExecutionPodAffinityTermLabelSelectorMatchExpressions(dict):
    """
    A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
        :param str key: key is the label key that the selector applies to.
        :param str operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        # key and operator are required; values is only stored when provided.
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        key is the label key that the selector applies to.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecution(dict):
    """
    Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key <topologyKey> matches that of any node on which a pod of the set of pods is running
    """
    def __init__(__self__, *,
                 topology_key: str,
                 label_selector: Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector'] = None,
                 namespaces: Optional[Sequence[str]] = None):
        """
        Defines a set of pods (namely those matching the labelSelector relative to the given namespace(s)) that this pod should be co-located (affinity) or not co-located (anti-affinity) with, where co-located is defined as running on a node whose value of the label with key <topologyKey> matches that of any node on which a pod of the set of pods is running
        :param str topology_key: This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.
        :param 'IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorArgs' label_selector: A label query over a set of resources, in this case pods.
        :param Sequence[str] namespaces: namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace"
        """
        # topology_key is mandatory; the selector and namespace list are
        # optional and omitted from the dict when not supplied.
        pulumi.set(__self__, "topology_key", topology_key)
        if label_selector is not None:
            pulumi.set(__self__, "label_selector", label_selector)
        if namespaces is not None:
            pulumi.set(__self__, "namespaces", namespaces)

    @property
    @pulumi.getter(name="topologyKey")
    def topology_key(self) -> str:
        """
        This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching the labelSelector in the specified namespaces, where co-located is defined as running on a node whose value of the label with key topologyKey matches that of any node on which any of the selected pods is running. Empty topologyKey is not allowed.
        """
        return pulumi.get(self, "topology_key")

    @property
    @pulumi.getter(name="labelSelector")
    def label_selector(self) -> Optional['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector']:
        """
        A label query over a set of resources, in this case pods.
        """
        return pulumi.get(self, "label_selector")

    @property
    @pulumi.getter
    def namespaces(self) -> Optional[Sequence[str]]:
        """
        namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means "this pod's namespace"
        """
        return pulumi.get(self, "namespaces")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelector(dict):
    """
    A label query over a set of resources, in this case pods.
    """
    def __init__(__self__, *,
                 match_expressions: Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']] = None,
                 match_labels: Optional[Mapping[str, str]] = None):
        """
        A label query over a set of resources, in this case pods.
        :param Sequence['IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressionsArgs'] match_expressions: matchExpressions is a list of label selector requirements. The requirements are ANDed.
        :param Mapping[str, str] match_labels: matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
        """
        # Both fields are optional; only supplied values are stored so that
        # absent fields stay out of the underlying dict.
        if match_expressions is not None:
            pulumi.set(__self__, "match_expressions", match_expressions)
        if match_labels is not None:
            pulumi.set(__self__, "match_labels", match_labels)

    @property
    @pulumi.getter(name="matchExpressions")
    def match_expressions(self) -> Optional[Sequence['outputs.IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions']]:
        """
        matchExpressions is a list of label selector requirements. The requirements are ANDed.
        """
        return pulumi.get(self, "match_expressions")

    @property
    @pulumi.getter(name="matchLabels")
    def match_labels(self) -> Optional[Mapping[str, str]]:
        """
        matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is "key", the operator is "In", and the values array contains only "value". The requirements are ANDed.
        """
        return pulumi.get(self, "match_labels")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSISpecNodeAffinityPodAntiAffinityRequiredDuringSchedulingIgnoredDuringExecutionLabelSelectorMatchExpressions(dict):
    """
    A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
    """
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: Optional[Sequence[str]] = None):
        """
        A label selector requirement is a selector that contains values, a key, and an operator that relates the key and values.
        :param str key: key is the label key that the selector applies to.
        :param str operator: operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        :param Sequence[str] values: values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        # key and operator are required; values is only stored when provided.
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        key is the label key that the selector applies to.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. This array is replaced during a strategic merge patch.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSISpecNodeTolerations(dict):
    """
    The pod this Toleration is attached to tolerates any taint that matches the triple <key,value,effect> using the matching operator <operator>.
    """
    def __init__(__self__, *,
                 effect: Optional[str] = None,
                 key: Optional[str] = None,
                 operator: Optional[str] = None,
                 toleration_seconds: Optional[int] = None,
                 value: Optional[str] = None):
        """
        The pod this Toleration is attached to tolerates any taint that matches the triple <key,value,effect> using the matching operator <operator>.
        :param str effect: Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute.
        :param str key: Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys.
        :param str operator: Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category.
        :param int toleration_seconds: TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system.
        :param str value: Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string.
        """
        # Every field of a toleration is optional; only supplied values are
        # stored, keeping absent fields out of the underlying dict.
        if effect is not None:
            pulumi.set(__self__, "effect", effect)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if operator is not None:
            pulumi.set(__self__, "operator", operator)
        if toleration_seconds is not None:
            pulumi.set(__self__, "toleration_seconds", toleration_seconds)
        if value is not None:
            pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def effect(self) -> Optional[str]:
        """
        Effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute.
        """
        return pulumi.get(self, "effect")

    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """
        Key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> Optional[str]:
        """
        Operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter(name="tolerationSeconds")
    def toleration_seconds(self) -> Optional[int]:
        """
        TolerationSeconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system.
        """
        return pulumi.get(self, "toleration_seconds")

    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """
        Value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string.
        """
        return pulumi.get(self, "value")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSISpecSidecars(dict):
    """
    Describes one CSI sidecar container image: its name, repository, tag and
    an optional image pull policy.
    """
    def __init__(__self__, *,
                 name: str,
                 repository: str,
                 tag: str,
                 image_pull_policy: Optional[str] = None):
        """
        :param str name: The name of the csi sidecar image
        :param str repository: The repository of the csi sidecar image
        :param str tag: The tag of the csi sidecar image
        :param str image_pull_policy: The pullPolicy of the csi sidecar image
        """
        # name, repository and tag are required; the pull policy is optional
        # and omitted from the dict when not supplied.
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "repository", repository)
        pulumi.set(__self__, "tag", tag)
        if image_pull_policy is not None:
            pulumi.set(__self__, "image_pull_policy", image_pull_policy)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the csi sidecar image
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def repository(self) -> str:
        """
        The repository of the csi sidecar image
        """
        return pulumi.get(self, "repository")

    @property
    @pulumi.getter
    def tag(self) -> str:
        """
        The tag of the csi sidecar image
        """
        return pulumi.get(self, "tag")

    @property
    @pulumi.getter(name="imagePullPolicy")
    def image_pull_policy(self) -> Optional[str]:
        """
        The pullPolicy of the csi sidecar image
        """
        return pulumi.get(self, "image_pull_policy")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class IBMBlockCSIStatus(dict):
    """
    IBMBlockCSIStatus defines the observed state of IBMBlockCSI
    """
    def __init__(__self__, *,
                 controller_ready: bool,
                 node_ready: bool,
                 phase: str,
                 version: str):
        """
        IBMBlockCSIStatus defines the observed state of IBMBlockCSI
        :param bool controller_ready: readiness flag for the controller component (no upstream description — presumably True once the controller is running; confirm against the CRD)
        :param bool node_ready: readiness flag for the node component (no upstream description — presumably True once the node plugin is running; confirm against the CRD)
        :param str phase: Phase is the driver running phase
        :param str version: Version is the current driver version
        """
        # All four status fields are required on this output type.
        pulumi.set(__self__, "controller_ready", controller_ready)
        pulumi.set(__self__, "node_ready", node_ready)
        pulumi.set(__self__, "phase", phase)
        pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter(name="controllerReady")
    def controller_ready(self) -> bool:
        """
        Readiness flag for the controller component (no upstream description).
        """
        return pulumi.get(self, "controller_ready")

    @property
    @pulumi.getter(name="nodeReady")
    def node_ready(self) -> bool:
        """
        Readiness flag for the node component (no upstream description).
        """
        return pulumi.get(self, "node_ready")

    @property
    @pulumi.getter
    def phase(self) -> str:
        """
        Phase is the driver running phase
        """
        return pulumi.get(self, "phase")

    @property
    @pulumi.getter
    def version(self) -> str:
        """
        Version is the current driver version
        """
        return pulumi.get(self, "version")

    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to its snake_case Python name,
        # falling back to the key unchanged when no mapping exists.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 59.617571
| 811
| 0.731994
| 18,376
| 161,504
| 6.297997
| 0.023999
| 0.012443
| 0.017973
| 0.026268
| 0.764283
| 0.763039
| 0.75941
| 0.751383
| 0.749663
| 0.747356
| 0
| 0.000449
| 0.200026
| 161,504
| 2,708
| 812
| 59.639586
| 0.895317
| 0.465549
| 0
| 0.804947
| 1
| 0
| 0.275774
| 0.228645
| 0
| 0
| 0
| 0
| 0
| 1
| 0.196466
| false
| 0
| 0.00424
| 0.048763
| 0.397173
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96b2746a6a7782cc75de0ecfbfba3cc8b173f98c
| 3,355
|
py
|
Python
|
unit_tests/Add_land_charge/validation/test_location_confirmation_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2019-10-03T13:58:29.000Z
|
2019-10-03T13:58:29.000Z
|
unit_tests/Add_land_charge/validation/test_location_confirmation_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | null | null | null |
unit_tests/Add_land_charge/validation/test_location_confirmation_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2021-04-11T05:24:57.000Z
|
2021-04-11T05:24:57.000Z
|
from unittest import TestCase
from unittest.mock import patch, call
from maintain_frontend.add_land_charge.validation.location_confirmation_validator import LocationConfirmationValidator
class TestLocationConfirmationValidator(TestCase):
    """Unit tests for LocationConfirmationValidator.validate."""

    @patch('maintain_frontend.add_land_charge.validation.location_confirmation_validator.ValidationErrorBuilder')
    @patch('maintain_frontend.add_land_charge.validation.location_confirmation_validator.FieldValidator')
    def test_params_passed_when_action_is_add(self, field_validator_mock, error_builder_mock):
        """For the 'add' action the field validator is built with the add-specific messages and is_required() is called."""
        LocationConfirmationValidator.validate(True, 'add')

        inline = ('If the charge is in your authority, tick and continue. '
                  'If the charge is in another authority, get permission from that authority.')
        expected_calls = [
            call(True, 'location-confirmation', None, error_builder_mock(),
                 summary_message='Confirm that you have the authority to add this charge',
                 inline_message=inline),
            call().is_required(),
        ]
        field_validator_mock.assert_has_calls(expected_calls)

    @patch('maintain_frontend.add_land_charge.validation.location_confirmation_validator.ValidationErrorBuilder')
    @patch('maintain_frontend.add_land_charge.validation.location_confirmation_validator.FieldValidator')
    def test_params_passed(self, field_validator_mock, error_builder_mock):
        """For a non-add action the field validator is built with the update messages and is_required() is called."""
        LocationConfirmationValidator.validate(True, 'vary')

        inline = ('If the charge is in your authority, tick and continue. '
                  'If the charge is in another authority, get permission from that authority.')
        expected_calls = [
            call(True, 'location-confirmation', None, error_builder_mock(),
                 summary_message='Confirm that you have the authority to update this charge',
                 inline_message=inline),
            call().is_required(),
        ]
        field_validator_mock.assert_has_calls(expected_calls)

    def test_validation_failed_when_input_is_blank_and_action_is_add(self):
        """Blank input with the 'add' action yields exactly one validation error."""
        result = LocationConfirmationValidator.validate('', 'add')
        self.assertEqual(len(result.errors), 1)

    def test_validation_failed_when_input_is_blank_and_action_is_vary(self):
        """Blank input with the 'vary' action yields exactly one validation error."""
        result = LocationConfirmationValidator.validate('', 'vary')
        self.assertEqual(len(result.errors), 1)

    def test_validation_passes_with_valid_input_and_action_is_add(self):
        """A ticked confirmation with the 'add' action produces no errors."""
        result = LocationConfirmationValidator.validate(True, 'add')
        self.assertEqual(len(result.errors), 0)

    def test_validation_passes_with_valid_input_and_action_is_vary(self):
        """A ticked confirmation with the 'vary' action produces no errors."""
        result = LocationConfirmationValidator.validate(True, 'vary')
        self.assertEqual(len(result.errors), 0)
| 53.253968
| 118
| 0.734426
| 389
| 3,355
| 6.084833
| 0.205656
| 0.059147
| 0.032953
| 0.045627
| 0.920997
| 0.920997
| 0.901141
| 0.901141
| 0.901141
| 0.852556
| 0
| 0.001476
| 0.19225
| 3,355
| 62
| 119
| 54.112903
| 0.871956
| 0.164232
| 0
| 0.5
| 0
| 0
| 0.292824
| 0.152182
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.15
| false
| 0.1
| 0.075
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
73658723c7cd14c0c24801df09fd5786d997fd20
| 5,493
|
py
|
Python
|
tests/test_connect_four.py
|
nmastrapasqua/PythonPlay
|
b401146a4acb3352515f141760a325f046f46bd9
|
[
"MIT"
] | null | null | null |
tests/test_connect_four.py
|
nmastrapasqua/PythonPlay
|
b401146a4acb3352515f141760a325f046f46bd9
|
[
"MIT"
] | null | null | null |
tests/test_connect_four.py
|
nmastrapasqua/PythonPlay
|
b401146a4acb3352515f141760a325f046f46bd9
|
[
"MIT"
] | null | null | null |
import unittest
from games.connect_four import ConnectFourGame, MiniMaxPlayer, EMPTY, AI_PIECE, PLAYER_PIECE
class TestConnectFourClass(unittest.TestCase):
    """Behavioural tests for ConnectFourGame and MiniMaxPlayer."""

    def setUp(self):
        self.game = ConnectFourGame(AI_PIECE, PLAYER_PIECE)
        self.aiPlayer = MiniMaxPlayer(AI_PIECE)

    def _drop(self, moves):
        """Apply a sequence of (column, piece) moves to the game, in order."""
        for column, piece in moves:
            self.game.do_move(column, piece)

    def test_is_moves_left(self):
        """is_moves_left is True on a fresh board and False once all 42 cells are filled."""
        self.assertEqual(True, self.game.is_moves_left())
        for _ in range(0, 6):
            for column in range(0, 7):
                self.game.do_move(column, AI_PIECE)
        self.assertEqual(False, self.game.is_moves_left())

    def test_valid_moves(self):
        """A column disappears from valid_moves only once it holds six pieces."""
        self.assertListEqual(self.game.valid_moves(), [0, 1, 2, 3, 4, 5, 6])

        self._drop([(0, AI_PIECE)] * 6)  # fill column 0 completely
        self.assertListEqual(self.game.valid_moves(), [1, 2, 3, 4, 5, 6])

        self._drop([(1, PLAYER_PIECE)])  # a single piece does not fill column 1
        self.assertListEqual(self.game.valid_moves(), [1, 2, 3, 4, 5, 6])

        self._drop([(1, PLAYER_PIECE)] * 5)  # now column 1 is full too
        self.assertListEqual(self.game.valid_moves(), [2, 3, 4, 5, 6])

        self._drop([(4, AI_PIECE)] * 6)  # fill column 4
        self.assertListEqual(self.game.valid_moves(), [2, 3, 5, 6])

    def test_undo(self):
        """undo removes the most recently placed piece, restoring the cell to EMPTY."""
        self.game.do_move(4, AI_PIECE)
        board = self.game.get_board()
        self.assertEqual(AI_PIECE, board[0][4])
        self.game.undo()
        self.assertEqual(EMPTY, board[0][4])

        self._drop([(4, AI_PIECE), (5, AI_PIECE),
                    (6, PLAYER_PIECE), (6, PLAYER_PIECE)])
        for (row, column), piece in [((0, 4), AI_PIECE), ((0, 5), AI_PIECE),
                                     ((0, 6), PLAYER_PIECE), ((1, 6), PLAYER_PIECE)]:
            self.assertEqual(piece, board[row][column])

        # Undo the last three moves; only the first piece should remain.
        for _ in range(3):
            self.game.undo()
        for (row, column), piece in [((0, 4), AI_PIECE), ((0, 5), EMPTY),
                                     ((0, 6), EMPTY), ((1, 6), EMPTY)]:
            self.assertEqual(piece, board[row][column])

        self.game.undo()
        self.assertEqual(EMPTY, board[0][4])

    def test_get_winner(self):
        """check_win detects vertical, horizontal and both diagonal connect-fours."""
        def assert_winners(ai_expected, player_expected):
            self.assertEqual(ai_expected, self.game.check_win(AI_PIECE))
            self.assertEqual(player_expected, self.game.check_win(PLAYER_PIECE))

        def rewind(count):
            for _ in range(count):
                self.game.undo()

        # Vertical four for the AI in column 0.
        self._drop([(0, AI_PIECE)] * 4)
        assert_winners(True, False)
        rewind(4)
        assert_winners(False, False)

        # Horizontal four for the player along the bottom row.
        self._drop([(column, PLAYER_PIECE) for column in range(0, 4)])
        assert_winners(False, True)
        rewind(4)
        assert_winners(False, False)

        # Rising diagonal for the player, stacked on AI filler pieces.
        self._drop([(0, PLAYER_PIECE),
                    (1, PLAYER_PIECE), (1, PLAYER_PIECE),
                    (2, PLAYER_PIECE), (2, PLAYER_PIECE), (2, PLAYER_PIECE),
                    (3, AI_PIECE), (3, AI_PIECE), (3, AI_PIECE), (3, PLAYER_PIECE)])
        assert_winners(False, True)
        rewind(10)
        assert_winners(False, False)

        # Falling diagonal for the AI, stacked on player filler pieces.
        self._drop([(6, PLAYER_PIECE), (6, PLAYER_PIECE), (6, PLAYER_PIECE), (6, AI_PIECE),
                    (5, PLAYER_PIECE), (5, PLAYER_PIECE), (5, AI_PIECE),
                    (4, PLAYER_PIECE), (4, AI_PIECE),
                    (3, AI_PIECE)])
        assert_winners(True, False)

    def test_evaluate(self):
        """evaluate scores the position from the MiniMax player's point of view."""
        self.assertEqual(0, self.aiPlayer.evaluate(self.game, 0))

        self.game.do_move(3, self.aiPlayer.get_id())
        self.assertEqual(7, self.aiPlayer.evaluate(self.game, 0))
        self.game.do_move(3, self.aiPlayer.get_id())
        self.assertEqual(17, self.aiPlayer.evaluate(self.game, 0))

        self.game.undo()
        self.game.undo()
        self._drop([(6, AI_PIECE), (6, AI_PIECE), (6, AI_PIECE), (6, PLAYER_PIECE),
                    (5, AI_PIECE), (5, AI_PIECE), (5, PLAYER_PIECE),
                    (4, AI_PIECE), (4, PLAYER_PIECE),
                    (2, AI_PIECE), (2, AI_PIECE)])
        self.assertEqual(13, self.aiPlayer.evaluate(self.game, 0))
# Allow the test module to be executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| 32.311765
| 92
| 0.625705
| 805
| 5,493
| 4.067081
| 0.074534
| 0.20281
| 0.14661
| 0.205254
| 0.851252
| 0.827123
| 0.801466
| 0.753207
| 0.715027
| 0.680819
| 0
| 0.027603
| 0.248134
| 5,493
| 170
| 93
| 32.311765
| 0.765133
| 0
| 0
| 0.68
| 0
| 0
| 0.001456
| 0
| 0
| 0
| 0
| 0
| 0.288
| 1
| 0.048
| false
| 0
| 0.016
| 0
| 0.072
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
73784cf5f7d50b4bb600bdd035cfa127c6e499b6
| 83
|
py
|
Python
|
vaxthesat/python/vax_common/vax_common/random_seed.py
|
cypher-me/HAS-Qualifier-Challenges
|
bb795303716155dad4a930880a58fecb5d9b50c5
|
[
"MIT"
] | 75
|
2020-07-20T20:54:00.000Z
|
2022-03-09T09:18:37.000Z
|
vaxthesat/python/vax_common/vax_common/random_seed.py
|
cypher-me/HAS-Qualifier-Challenges
|
bb795303716155dad4a930880a58fecb5d9b50c5
|
[
"MIT"
] | 3
|
2020-09-13T00:46:49.000Z
|
2021-07-06T16:18:22.000Z
|
vaxthesat/python/vax_common/vax_common/random_seed.py
|
cypher-me/HAS-Qualifier-Challenges
|
bb795303716155dad4a930880a58fecb5d9b50c5
|
[
"MIT"
] | 14
|
2020-07-22T16:34:51.000Z
|
2021-09-13T12:19:59.000Z
|
import os
def get_random_seed() -> int:
    """Return the random seed from the SEED environment variable (0 when unset)."""
    seed_text = os.environ.get("SEED", "0")
    return int(seed_text)
| 10.375
| 38
| 0.614458
| 13
| 83
| 3.769231
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.204819
| 83
| 7
| 39
| 11.857143
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
738b3e12b1e376068953d46eb6b8c4f22e773d67
| 5,671
|
py
|
Python
|
Scripts Python - Treinamento e Classificacao/geraStopWords.py
|
covid-news/classificador
|
d674bd1146d0d72f328ac7ec1d0ba2804873c57a
|
[
"MIT"
] | null | null | null |
Scripts Python - Treinamento e Classificacao/geraStopWords.py
|
covid-news/classificador
|
d674bd1146d0d72f328ac7ec1d0ba2804873c57a
|
[
"MIT"
] | null | null | null |
Scripts Python - Treinamento e Classificacao/geraStopWords.py
|
covid-news/classificador
|
d674bd1146d0d72f328ac7ec1d0ba2804873c57a
|
[
"MIT"
] | null | null | null |
import xlsxwriter
import pandas as pd
import xlrd as xlrd
stopWords = [ 'a', 'à', 'agora', 'ainda', 'alguém', 'algum', 'alguma', 'algumas', 'alguns', 'ampla', 'amplas', 'amplo', 'amplos', 'ante', 'antes', 'ao', 'aos', 'após', 'aquela', 'aquelas', 'aquele', 'aqueles', 'aquilo', 'as', 'às', 'até', 'através', 'cada', 'coisa', 'coisas', 'com', 'como', 'contra', 'contudo', 'da', 'daquele', 'daqueles', 'das', 'de', 'dela', 'delas', 'dele', 'deles', 'depois', 'dessa', 'dessas', 'desse', 'desses', 'desta', 'destas', 'deste', 'destes', 'deve', 'devem', 'devendo', 'dever', 'deverá', 'deverão', 'deveria', 'deveriam', 'devia', 'deviam', 'disse', 'disso', 'disto', 'dito', 'diz', 'dizem', 'do', 'dos','e','é','ela','elas','ele','eles','em','entre','era','eram','éramos','essa','essas','esse','esses','esta','está','estamos','estão','estas','estava','estavam','estávamos','este','esteja','estejam','estejamos','estes','esteve','estive','estivemos','estiver','estivera','estiveram','estivéramos','estiverem','estivermos','estivesse','estivessem','estivéssemos','estou','eu','foi','fomos','for','fora','foram','fôramos','forem','formos','fosse','fossem','fôssemos','fui','há','haja','hajam','hajamos','hão','havemos','havia','hei','houve','houvemos','houver','houvera','houverá','houveram','houvéramos','houverão','houverei','houverem','houveremos','houveria','houveriam','houveríamos','houvermos','houvesse','houvessem','houvéssemos','isso','isto','já','lhe','lhes','mais','mas','me','mesmo','meu','meus','minha','minhas','muito','na','não','nas','nem','no','nos','nós','nossa','nossas','nosso','nossos','num','numa','o','os','ou','para','pela','pelas','pelo','pelos','por','qual','quando','que','quem','são','se','seja','sejam','sejamos','sem','ser','será','serão','serei','seremos','seria','seriam','seríamos','seu','seus','só','sobre','somos','sou','sua','suas','também','te','tem','têm','temos','tenha','tenham','tenhamos','tenho','ter','terá','terão','terei','teremos','teria','teriam','teríamos','teu','teus','teve','tinha','tinham','tínhamos','tive','tivemos
','tiver','tivera','tiveram','tivéramos','tiverem','tivermos','tivesse','tivessem','tivéssemos','tu','tua','tuas','um','uma','você','vocês','vos','A','À','Agora','Ainda','Alguém','Algum','Alguma','Algumas','Alguns','Ampla','Amplas','Amplo','Amplos','Ante','Antes','Ao','Aos','Após','Aquela','Aquelas','Aquele','Aqueles','Aquilo','As','Às','Até','Através','Cada','Coisa','Coisas','Com','Como','Contra','Contudo','Da','Daquele','Daqueles','Das','De','Dela','Delas','Dele','Deles','Depois','Dessa','Dessas','Desse','Desses','Desta','Destas','Deste','Destes','Deve','Devem','Devendo','Dever','Deverá','Deverão','Deveria','Deveriam','Devia','Deviam','Disse','Disso','Disto','Dito','Diz','Dizem','Do','Dos','E','É','Ela','Elas','Ele','Eles','Em','Entre','Era','Eram','Éramos','Essa','Essas','Esse','Esses','Esta','Está','Estamos','Estão','Estas','Estava','Estavam','Estávamos','Este','Esteja','Estejam','Estejamos','Estes','Esteve','Estive','Estivemos','Estiver','Estivera','Estiveram','Estivéramos','Estiverem','Estivermos','Estivesse','Estivessem','Estivéssemos','Estou','Eu','Foi','Fomos','For','Fora','Foram','Fôramos','Forem','Formos','Fosse','Fossem','Fôssemos','Fui','Há','Haja','Hajam','Hajamos','Hão','Havemos','Havia','Hei','Houve','Houvemos','Houver','Houvera','Houverá','Houveram','Houvéramos','Houverão','Houverei','Houverem','Houveremos','Houveria','Houveriam','Houveríamos','Houvermos','Houvesse','Houvessem','Houvéssemos','Isso','Isto','Já','Lhe','Lhes','Mais','Mas','Me','Mesmo','Meu','Meus','Minha','Minhas','Muito','Na','Não','Nas','Nem','No','Nos','Nós','Nossa','Nossas','Nosso','Nossos','Num','Numa','O','Os','Ou','Para','Pela','Pelas','Pelo','Pelos','Por','Qual','Quando','Que','Quem','São','Se','Seja','Sejam','Sejamos','Sem','Ser','Será','Serão','Serei','Seremos','Seria','Seriam','Seríamos','Seu','Seus','Só','Sobre','Somos','Sou','Sua','Suas','Também','Te','Tem','Têm','Temos','Tenha','Tenham','Tenhamos','Tenho','Ter','Terá','Terão','Terei','Teremos','Teria','Teriam','Teríamos',
'Teu','Teus','Teve','Tinham','Tínhamos','Tive','Tivemos','Tiver','Tivera','Tiveram','Tivéramos','Tiverem','Tivermos','Tivesse','Tivessem','Tivéssemos','Tu','Tua','Tuas','Um','Uma','Você','Vocês', 'Vos' ]
# Stop-word removal for the news test set: reads headlines (column A) and
# labels (column B) from 'Test Set_v1 Covid.xlsx', strips the hard-coded
# Portuguese stop words from each headline, and writes the filtered text
# plus the untouched labels to 'Test Set_v1 Covid SW.xlsx'.
# Alternative (disabled): load the stop-word list from a spreadsheet.
# stopWordsSheet = pd.read_excel('Stop Words.xlsx', header = None, names = ['A'])
# stopWords = stopWordsSheet['A']
#firstNew = "João Alberto foi espancado por mais de 5 minutos antes de morrer, diz polícia"
#secondNew = "Após início pacífico, protesto teve confusão em Porto Alegre"
newsSheet = pd.read_excel('Test Set_v1 Covid.xlsx', header = None, names = ['A', 'B'])
news = newsSheet['A']      # headline text
labels = newsSheet['B']    # classification label per headline
workbook = xlsxwriter.Workbook( 'Test Set_v1 Covid SW.xlsx' )
worksheet = workbook.add_worksheet()
#firstSplited = firstNew.split()
#secondSplited = secondNew.split()
# firstSplitedResult = []
# secondSplitedResult = []
splited = []          # words of the headline currently being processed
splitedResult = []    # words kept after stop-word filtering (reused per row)
""" for i in range( len(firstSplited) ):
if firstSplited[i] not in stopWords:
firstSplitedResult.append( firstSplited[i] ) """
# Filter each headline word-by-word; matching is exact (the stop-word list
# therefore contains both lower- and capitalised variants).
for i in range( len(news) ):
    splited = news[i].split()
    for j in range( len(splited) ):
        if splited[j] not in stopWords: splitedResult.append( splited[j] )
    # Column 0 of the output gets the headline with stop words removed.
    worksheet.write( i, 0, " ".join(splitedResult) )
    splitedResult.clear()
# Column 1 gets the labels, copied through unchanged.
row = 0
for k in labels:
    worksheet.write( row, 1, k )
    row += 1
workbook.close()
""" firstResult = " ".join(firstSplitedResult)
secondResult = " ".join(secondSplitedResult)
print("First new: " + firstNew)
print("First result: " + firstResult)
print()
print("Second new: " + secondNew)
print("Second result: " + secondResult) """
| 101.267857
| 4,203
| 0.636925
| 688
| 5,671
| 5.242733
| 0.473837
| 0.005822
| 0.008317
| 0.006654
| 0.737455
| 0.718603
| 0.718603
| 0.718603
| 0.718603
| 0.718603
| 0
| 0.00131
| 0.058014
| 5,671
| 56
| 4,204
| 101.267857
| 0.673905
| 0.068595
| 0
| 0
| 0
| 0
| 0.541939
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.136364
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7c527f88c3b00962dafca80f14909fc236f28f2
| 164
|
py
|
Python
|
ramda/to_pairs_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/to_pairs_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/to_pairs_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from ramda import *
from ramda.private.asserts import *
def to_pairs_test():
    """to_pairs should turn a dict into a list of [key, value] pairs."""
    sample = {"a": 1, "b": 2, "c": 3}
    expected = [["a", 1], ["b", 2], ["c", 3]]
    assert_equal(to_pairs(sample), expected)
| 23.428571
| 84
| 0.560976
| 28
| 164
| 3.142857
| 0.607143
| 0.204545
| 0.068182
| 0.090909
| 0.136364
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0.044118
| 0.170732
| 164
| 6
| 85
| 27.333333
| 0.602941
| 0
| 0
| 0
| 0
| 0
| 0.036585
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e39326b84ead0390f581aedb2604f19762b680d2
| 19,929
|
py
|
Python
|
rapid7vmconsole/api/vulnerability_check_api.py
|
kiblik/vm-console-client-python
|
038f6d33e8b2654a558326c6eb87f09ee23e0e22
|
[
"MIT"
] | 61
|
2018-05-17T05:57:09.000Z
|
2022-03-08T13:59:21.000Z
|
rapid7vmconsole/api/vulnerability_check_api.py
|
kiblik/vm-console-client-python
|
038f6d33e8b2654a558326c6eb87f09ee23e0e22
|
[
"MIT"
] | 33
|
2018-06-26T16:21:14.000Z
|
2022-03-03T20:55:47.000Z
|
rapid7vmconsole/api/vulnerability_check_api.py
|
kiblik/vm-console-client-python
|
038f6d33e8b2654a558326c6eb87f09ee23e0e22
|
[
"MIT"
] | 43
|
2018-02-24T05:45:53.000Z
|
2022-03-31T22:15:16.000Z
|
# coding: utf-8
"""
Python InsightVM API Client
OpenAPI spec version: 3
Contact: support@rapid7.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from rapid7vmconsole.api_client import ApiClient
class VulnerabilityCheckApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Maintenance note: the boilerplate every endpoint duplicated (kwargs
    validation, Accept/Content-Type header construction, the call_api
    plumbing) now lives in the private helpers ``_check_kwargs`` and
    ``_invoke``. The runtime dependency on ``six`` was removed — plain
    dict iteration is equivalent on both Python 2 and 3.
    """

    # Transport-control kwargs accepted by every endpoint in addition to
    # its own parameters.
    _CONTROL_PARAMS = ('async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Fall back to a default client so the API object is usable as-is.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    @classmethod
    def _check_kwargs(cls, kwargs, allowed, method_name):
        """Validate **kwargs for *method_name*.

        :param kwargs: the keyword arguments the endpoint received.
        :param allowed: endpoint-specific parameter names.
        :param method_name: name reported in the TypeError message.
        :return: a plain-dict copy of *kwargs*.
        :raises TypeError: on any argument outside *allowed* + control params.
        """
        accepted = set(allowed) | set(cls._CONTROL_PARAMS)
        for key in kwargs:
            if key not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
        return dict(kwargs)

    def _invoke(self, resource_path, response_type, params,
                path_params=None, query_params=None, collection_formats=None):
        """Issue a GET request against *resource_path* via the ApiClient.

        *params* carries the transport-control flags (async_req,
        _return_http_data_only, _preload_content, _request_timeout).
        """
        header_params = {
            # HTTP header `Accept`
            'Accept': self.api_client.select_header_accept(
                ['application/json;charset=UTF-8']),  # noqa: E501
            # HTTP header `Content-Type`
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),  # noqa: E501
        }
        return self.api_client.call_api(
            resource_path, 'GET',
            path_params if path_params is not None else {},
            query_params if query_params is not None else [],
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,  # noqa: E501
            auth_settings=[],  # no authentication configured for these calls
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=(collection_formats
                                if collection_formats is not None else {}))

    def get_vulnerability_check_types(self, **kwargs):  # noqa: E501
        """Check Types  # noqa: E501

        Returns the vulnerability check types. The type groups related
        vulnerability checks by their purpose, property, or related
        characteristic.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_vulnerability_check_types(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: ReferencesWithVulnerabilityCheckTypeIDLink
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_vulnerability_check_types_with_http_info(**kwargs)  # noqa: E501
        data = self.get_vulnerability_check_types_with_http_info(**kwargs)  # noqa: E501
        return data

    def get_vulnerability_check_types_with_http_info(self, **kwargs):  # noqa: E501
        """Check Types — see :meth:`get_vulnerability_check_types`.  # noqa: E501

        :param async_req bool
        :return: ReferencesWithVulnerabilityCheckTypeIDLink
        """
        params = self._check_kwargs(kwargs, (),
                                    'get_vulnerability_check_types')
        return self._invoke(
            '/api/3/vulnerability_checks_types',
            'ReferencesWithVulnerabilityCheckTypeIDLink',
            params)

    def get_vulnerability_checks(self, **kwargs):  # noqa: E501
        """Checks  # noqa: E501

        Returns vulnerability checks. Optional search and filtering
        parameters may be supplied to refine the results. Searching allows
        full text search of the vulnerability details a check is related
        to.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_vulnerability_checks(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str search: Vulnerability search term to find vulnerability checks for. e.g. `\"ssh\"`.
        :param bool safe: Whether to return vulnerability checks that are considered \"safe\" to run. Defaults to return safe and unsafe checks.
        :param bool potential: Whether to only return checks that result in potentially vulnerable results. Defaults to return all checks.
        :param bool requires_credentials: Whether to only return checks that require credentials in order to successfully execute. Defaults to return all checks.
        :param bool unique: Whether to only return checks that guarantee to be executed once-and-only once on a host resulting in a unique result.
        :param str type: The type of vulnerability checks to return.
        :param int page: The index of the page (zero-based) to retrieve.
        :param int size: The number of records per page to retrieve.
        :param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`.
        :return: PageOfVulnerabilityCheck
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_vulnerability_checks_with_http_info(**kwargs)  # noqa: E501
        data = self.get_vulnerability_checks_with_http_info(**kwargs)  # noqa: E501
        return data

    def get_vulnerability_checks_with_http_info(self, **kwargs):  # noqa: E501
        """Checks — see :meth:`get_vulnerability_checks`.  # noqa: E501

        :return: PageOfVulnerabilityCheck
        """
        params = self._check_kwargs(
            kwargs,
            ('search', 'safe', 'potential', 'requires_credentials',
             'unique', 'type', 'page', 'size', 'sort'),
            'get_vulnerability_checks')
        # (python name, wire name) pairs; only supplied values are sent.
        query_params = []
        for name, wire in (('search', 'search'),
                           ('safe', 'safe'),
                           ('potential', 'potential'),
                           ('requires_credentials', 'requiresCredentials'),
                           ('unique', 'unique'),
                           ('type', 'type'),
                           ('page', 'page'),
                           ('size', 'size'),
                           ('sort', 'sort')):
            if name in params:
                query_params.append((wire, params[name]))
        collection_formats = {}
        if 'sort' in params:
            # Multiple sort criteria are serialized as repeated parameters.
            collection_formats['sort'] = 'multi'  # noqa: E501
        return self._invoke(
            '/api/3/vulnerability_checks',
            'PageOfVulnerabilityCheck',
            params,
            query_params=query_params,
            collection_formats=collection_formats)

    def get_vulnerability_checks_for_vulnerability(self, id, **kwargs):  # noqa: E501
        """Vulnerability Checks  # noqa: E501

        Returns the vulnerability checks that assess for a specific
        vulnerability during a scan.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_vulnerability_checks_for_vulnerability(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The identifier of the vulnerability. (required)
        :return: ReferencesWithVulnerabilityCheckIDLink
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_vulnerability_checks_for_vulnerability_with_http_info(id, **kwargs)  # noqa: E501
        data = self.get_vulnerability_checks_for_vulnerability_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def get_vulnerability_checks_for_vulnerability_with_http_info(self, id, **kwargs):  # noqa: E501
        """Vulnerability Checks — see :meth:`get_vulnerability_checks_for_vulnerability`.  # noqa: E501

        :param str id: The identifier of the vulnerability. (required)
        :return: ReferencesWithVulnerabilityCheckIDLink
        """
        params = self._check_kwargs(
            kwargs, ('id',), 'get_vulnerability_checks_for_vulnerability')
        params['id'] = id
        # verify the required parameter 'id' is set
        if params['id'] is None:
            raise ValueError("Missing the required parameter `id` when calling `get_vulnerability_checks_for_vulnerability`")  # noqa: E501
        return self._invoke(
            '/api/3/vulnerabilities/{id}/checks',
            'ReferencesWithVulnerabilityCheckIDLink',
            params,
            path_params={'id': params['id']})

    def vulnerability_check(self, id, **kwargs):  # noqa: E501
        """Check  # noqa: E501

        Returns the vulnerability check.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.vulnerability_check(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The identifier of the vulnerability check. (required)
        :return: VulnerabilityCheck
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.vulnerability_check_with_http_info(id, **kwargs)  # noqa: E501
        data = self.vulnerability_check_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def vulnerability_check_with_http_info(self, id, **kwargs):  # noqa: E501
        """Check — see :meth:`vulnerability_check`.  # noqa: E501

        :param str id: The identifier of the vulnerability check. (required)
        :return: VulnerabilityCheck
        """
        params = self._check_kwargs(kwargs, ('id',), 'vulnerability_check')
        params['id'] = id
        # verify the required parameter 'id' is set
        if params['id'] is None:
            raise ValueError("Missing the required parameter `id` when calling `vulnerability_check`")  # noqa: E501
        return self._invoke(
            '/api/3/vulnerability_checks/{id}',
            'VulnerabilityCheck',
            params,
            path_params={'id': params['id']})
| 44.286667
| 244
| 0.639219
| 2,325
| 19,929
| 5.27828
| 0.102366
| 0.045632
| 0.018253
| 0.023468
| 0.914113
| 0.887549
| 0.886245
| 0.877282
| 0.874022
| 0.871741
| 0
| 0.015708
| 0.274876
| 19,929
| 449
| 245
| 44.385301
| 0.833506
| 0.415977
| 0
| 0.726496
| 1
| 0
| 0.182612
| 0.072669
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.017094
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3a89bbafd98074aa698dcd93bccfd3d14c8961f
| 17,814
|
py
|
Python
|
userbot/plugins/nekos_gifs.py
|
meaall-com/Telebot
|
a08193ae6c3e5814b309d079e95c4951eafcbc19
|
[
"MIT"
] | null | null | null |
userbot/plugins/nekos_gifs.py
|
meaall-com/Telebot
|
a08193ae6c3e5814b309d079e95c4951eafcbc19
|
[
"MIT"
] | null | null | null |
userbot/plugins/nekos_gifs.py
|
meaall-com/Telebot
|
a08193ae6c3e5814b309d079e95c4951eafcbc19
|
[
"MIT"
] | null | null | null |
# Made for TeleBot
# Re-written by @
# Kangers kwwp the credits
#Made by @
#From Nekos API
import datetime
from telethon import events
from telethon.errors.rpcerrorlist import YouBlockedUserError
from telethon.tl.functions.account import UpdateNotifySettingsRequest
from uniborg.util import admin_cmd
@borg.on(admin_cmd("sologif"))
async def _(event):
if event.fwd_from:
return
chat = ""
await event.edit("```Finding an Anime Solo GIF..```\n**It's Barely SFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/sologif")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("cumgif"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding a Henti Cum GIF..```\n**WARNING : It's NSFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/cumgif")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("ngif"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding an Anime Neko GIF..```\n**It's SFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/ngif")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("tickle"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding an Anime Tickle GIF..```\n**It's SFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/tickle")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("feed"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding an Anime Feeding GIF..```\n**It's SFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/feed")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("bjgif"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding a Hentai Blow Job GIF..```\n**WARNING : It's NSFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/bjgif")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("analgif"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding a Hentai Anal GIF..```\n**WARNING : It's NSFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/bj")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("poke"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding an Anime Poke GIF..```\n**Oh! It's SFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/poke")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("pussygif"))
async def _(event):
if event.fwd_from:
return
chat = "@KeikoSDbot"
await event.edit("```Finding a Hentai Pussy GIF..```\n**WARNING : It's NSFW**")
async with borg.conversation(chat) as conv:
try:
response = conv.wait_event(events.NewMessage(incoming=True,from_users=1212429864))
await borg.send_message(chat, "/pussygif")
response = await response
except YouBlockedUserError:
await event.reply("```Please unblock @KeikoSDbot and try again```")
return
if response.text.startswith("Forward"):
await event.edit("```can you kindly disable your forward privacy settings for good?```")
else:
await event.delete()
await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("hentaigif"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding a Hentai GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/hentaigif")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#By @WhySooSerious
@borg.on(admin_cmd("classic"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding a Hentai Classic GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/classic")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#By @WhySooSerious
@borg.on(admin_cmd("kuni"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding a Hentai Pussy Lick GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/kuni")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#By @WhySooSerious
@borg.on(admin_cmd("cuddle"))
async def _(event):
    """Ask @KeikoSDbot for an anime cuddle GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding an Anime Cuddle GIF..```\n**WARNING : It's Really Kawaii**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/cuddle")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#By @WhySooSerious
@borg.on(admin_cmd("titsgif"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding a Hentai Tits GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/titsgif")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#by @WhySooSerious
@borg.on(admin_cmd("smug"))
async def _(event):
    """Ask @KeikoSDbot for an anime smug GIF and post it in the current chat."""
    if event.fwd_from:
        return
    bot = "@KeikoSDbot"
    await event.edit("```Finding an Anime Smug GIF..```\n**It's SFW**")
    async with borg.conversation(bot) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            pending = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(bot, "/smug")
            reply = await pending
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if not reply.text.startswith("Forward"):
            await event.delete()
            await borg.send_file(event.chat_id, reply.message.media)
        else:
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
#By @WhySooSerious
@borg.on(admin_cmd("baka"))
async def _(event):
    """Ask @KeikoSDbot for an anime baka GIF and post it in the current chat."""
    if event.fwd_from:
        return
    chat = "@KeikoSDbot"
    await event.edit("```Finding an Anime Baka GIF..```\n**It's SFW**")
    async with borg.conversation(chat) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            response = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            # BUG FIX: this handler previously sent "/bj" (copy-paste from the
            # bj handler), fetching a NSFW GIF for a command advertised as a
            # SFW "baka" GIF. Send the matching bot command instead.
            await borg.send_message(chat, "/baka")
            response = await response
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if response.text.startswith("Forward"):
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
        else:
            await event.delete()
            await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("lesbian"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW GIF and post it in the current chat."""
    if event.fwd_from:
        return
    chat = "@KeikoSDbot"
    # FIX: status text previously read "an A Hentai" (doubled article).
    await event.edit("```Finding a Hentai Lesbian GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(chat) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            response = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(chat, "/lesbian")
            response = await response
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if response.text.startswith("Forward"):
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
        else:
            # Consistency fix: every sibling handler deletes the trigger
            # message before posting the media; this one omitted it.
            await event.delete()
            await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("nsfwneko"))
async def _(event):
    """Ask @KeikoSDbot for a NSFW neko GIF and post it in the current chat."""
    if event.fwd_from:
        return
    chat = "@KeikoSDbot"
    # FIX: status text previously read "an A Hentai" (doubled article).
    await event.edit("```Finding a Hentai Neko GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(chat) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            response = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(chat, "/nekonsfw")
            response = await response
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if response.text.startswith("Forward"):
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
        else:
            # Consistency fix: every sibling handler deletes the trigger
            # message before posting the media; this one omitted it.
            await event.delete()
            await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
@borg.on(admin_cmd("kiss"))
async def _(event):
    """Ask @KeikoSDbot for an anime kissing GIF and post it in the current chat."""
    if event.fwd_from:
        return
    chat = "@KeikoSDbot"
    # FIX: status text previously read "an an Anime" (doubled article).
    await event.edit("```Finding an Anime Kissing GIF..```\n**WARNING : It's NSFW**")
    async with borg.conversation(chat) as conv:
        try:
            # Register the listener before sending so the bot's reply is not missed.
            response = conv.wait_event(events.NewMessage(incoming=True, from_users=1212429864))
            await borg.send_message(chat, "/kiss")
            response = await response
        except YouBlockedUserError:
            await event.reply("```Please unblock @KeikoSDbot and try again```")
            return
        if response.text.startswith("Forward"):
            await event.edit("```can you kindly disable your forward privacy settings for good?```")
        else:
            # Consistency fix: every sibling handler deletes the trigger
            # message before posting the media; this one omitted it.
            await event.delete()
            await borg.send_file(event.chat_id, response.message.media)
#By @WhySooSerious
| 44.758794
| 105
| 0.587291
| 1,968
| 17,814
| 5.238313
| 0.064024
| 0.070812
| 0.051605
| 0.025803
| 0.941507
| 0.940149
| 0.937433
| 0.937433
| 0.937433
| 0.933844
| 0
| 0.015255
| 0.300831
| 17,814
| 397
| 106
| 44.871537
| 0.812445
| 0.022679
| 0
| 0.831956
| 0
| 0
| 0.223982
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.013774
| 0
| 0.118457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3a8c11feec341a1413885256a020a2284e9d9af
| 67
|
py
|
Python
|
learn_dash_1.py
|
rblcoder/Learn_dash
|
e8b6bebceafae7fc325b091c6e5e8b5db9e16abd
|
[
"Apache-2.0"
] | null | null | null |
learn_dash_1.py
|
rblcoder/Learn_dash
|
e8b6bebceafae7fc325b091c6e5e8b5db9e16abd
|
[
"Apache-2.0"
] | null | null | null |
learn_dash_1.py
|
rblcoder/Learn_dash
|
e8b6bebceafae7fc325b091c6e5e8b5db9e16abd
|
[
"Apache-2.0"
] | null | null | null |
# Print the installed dash_core_components version.
import dash_core_components as dcc

print(dcc.__version__)
| 33.5
| 39
| 0.925373
| 9
| 67
| 6
| 0.666667
| 0.296296
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029851
| 67
| 2
| 39
| 33.5
| 0.830769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
e3be2d6504a3317daa73cd3f7db95f0309c536b0
| 103,447
|
py
|
Python
|
Thrift/gen-py/SpotifakeServices/ConsumerService.py
|
BrunoLujan/Spotifake-DESER
|
a811444af0a1326659dd27949c6a1c66c7cd66a1
|
[
"Apache-2.0"
] | null | null | null |
Thrift/gen-py/SpotifakeServices/ConsumerService.py
|
BrunoLujan/Spotifake-DESER
|
a811444af0a1326659dd27949c6a1c66c7cd66a1
|
[
"Apache-2.0"
] | null | null | null |
Thrift/gen-py/SpotifakeServices/ConsumerService.py
|
BrunoLujan/Spotifake-DESER
|
a811444af0a1326659dd27949c6a1c66c7cd66a1
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
# NOTE: Autogenerated by the Thrift compiler (0.13.0) — do not hand-edit the
# logic; regenerate from the .thrift IDL instead. This class is the abstract
# service interface: every method is a stub (`pass`) to be overridden by a
# concrete handler or the generated Client below.
class Iface(object):
    """
    This file describes the services
    that needs to be passed to the API methods in order to
    manage Consumer and Content Creator users and Content.
    """

    def GetConsumerById(self, idConsumer):
        """
        Get Consumer by Id

        @param idConsumer
            The Consumer Id to be obtained.
        @return Consumer
            Consumer object

        Parameters:
         - idConsumer

        """
        pass

    def GetConsumerByEmail(self, email):
        """
        Get Consumer by email

        @param email
            The Consumer email to be obtained.
        @return bool
            bool object

        Parameters:
         - email

        """
        pass

    def GetConsumerByEmailPassword(self, email, password):
        """
        Get Consumer by email and password

        @param email
            The Consumer email to be obtained.
        @param password
            The Consumer password to be obtained.
        @return Consumer
            Consumer object

        Parameters:
         - email
         - password

        """
        pass

    def AddConsumer(self, newConsumer):
        """
        Register a Consumer.

        @param newconsumer
        @return Consumer
            Consumer object added

        Parameters:
         - newConsumer

        """
        pass

    def DeleteConsumer(self, email):
        """
        Delete a Consumer

        @param email
            The Consumer email of the Consumer to be deleted.
        @return Id
            The Consumer Id of the Consumer deleted.

        Parameters:
         - email

        """
        pass

    def UpdateConsumerPassword(self, email, newPassword):
        """
        Update previously registered Consumer password.

        @param email
            The Consumer Email of the Consumer which require an update password.
        @return Consumer
            Modified Consumer obejct.

        Parameters:
         - email
         - newPassword

        """
        pass

    def UpdateConsumerImage(self, email, fileName):
        """
        Update previously registered Consumer image.

        @param email
            The Consumer Email of the Consumer which require an update image.
        @return Consumer
            Modified Consumer obejct.

        Parameters:
         - email
         - fileName

        """
        pass

    def LoginConsumer(self, email, password):
        """
        Allows the login of a consumer

        @param email
            The Consumer email
        @param password
            The Email password of the consumer
        @return Consumer
            Consumer object

        Parameters:
         - email
         - password

        """
        pass

    def AddImageToMedia(self, fileName, image):
        """
        Add image file binary

        @param binary image
            The binary number that will be keep.
        @return bool
            true or false.

        Parameters:
         - fileName
         - image

        """
        pass

    def GetImageToMedia(self, fileName):
        """
        Get image file binary

        @param binary image
            The binary number that will be keep.
        @return binary
            binary image.

        Parameters:
         - fileName

        """
        pass

    def DeleteImageToMedia(self, fileName):
        """
        Delete image file binary

        @param fileName
            The fileName of file that will be delete.
        @return bool
            True or False

        Parameters:
         - fileName

        """
        pass
# NOTE: Autogenerated by the Thrift compiler (0.13.0) — do not hand-edit the
# logic; regenerate from the .thrift IDL instead. For every RPC X the client
# exposes X(...) which calls send_X (serialize the X_args struct and flush it
# over the output protocol) followed by recv_X (read the X_result struct back
# and either return result.success or raise the declared service exception).
class Client(Iface):
    """
    This file describes the services
    that needs to be passed to the API methods in order to
    manage Consumer and Content Creator users and Content.
    """

    def __init__(self, iprot, oprot=None):
        # A single protocol can serve both directions; a distinct output
        # protocol is optional.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0  # sequence id sent with each outgoing message

    def GetConsumerById(self, idConsumer):
        """
        Get Consumer by Id

        @param idConsumer
            The Consumer Id to be obtained.
        @return Consumer
            Consumer object

        Parameters:
         - idConsumer

        """
        self.send_GetConsumerById(idConsumer)
        return self.recv_GetConsumerById()

    def send_GetConsumerById(self, idConsumer):
        self._oprot.writeMessageBegin('GetConsumerById', TMessageType.CALL, self._seqid)
        args = GetConsumerById_args()
        args.idConsumer = idConsumer
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetConsumerById(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        # Server-side TApplicationException comes back as an EXCEPTION frame.
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetConsumerById_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetConsumerById failed: unknown result")

    def GetConsumerByEmail(self, email):
        """
        Get Consumer by email

        @param email
            The Consumer email to be obtained.
        @return bool
            bool object

        Parameters:
         - email

        """
        self.send_GetConsumerByEmail(email)
        return self.recv_GetConsumerByEmail()

    def send_GetConsumerByEmail(self, email):
        self._oprot.writeMessageBegin('GetConsumerByEmail', TMessageType.CALL, self._seqid)
        args = GetConsumerByEmail_args()
        args.email = email
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetConsumerByEmail(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetConsumerByEmail_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetConsumerByEmail failed: unknown result")

    def GetConsumerByEmailPassword(self, email, password):
        """
        Get Consumer by email and password

        @param email
            The Consumer email to be obtained.
        @param password
            The Consumer password to be obtained.
        @return Consumer
            Consumer object

        Parameters:
         - email
         - password

        """
        self.send_GetConsumerByEmailPassword(email, password)
        return self.recv_GetConsumerByEmailPassword()

    def send_GetConsumerByEmailPassword(self, email, password):
        self._oprot.writeMessageBegin('GetConsumerByEmailPassword', TMessageType.CALL, self._seqid)
        args = GetConsumerByEmailPassword_args()
        args.email = email
        args.password = password
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetConsumerByEmailPassword(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetConsumerByEmailPassword_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetConsumerByEmailPassword failed: unknown result")

    def AddConsumer(self, newConsumer):
        """
        Register a Consumer.

        @param newconsumer
        @return Consumer
            Consumer object added

        Parameters:
         - newConsumer

        """
        self.send_AddConsumer(newConsumer)
        return self.recv_AddConsumer()

    def send_AddConsumer(self, newConsumer):
        self._oprot.writeMessageBegin('AddConsumer', TMessageType.CALL, self._seqid)
        args = AddConsumer_args()
        args.newConsumer = newConsumer
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_AddConsumer(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = AddConsumer_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "AddConsumer failed: unknown result")

    def DeleteConsumer(self, email):
        """
        Delete a Consumer

        @param email
            The Consumer email of the Consumer to be deleted.
        @return Id
            The Consumer Id of the Consumer deleted.

        Parameters:
         - email

        """
        self.send_DeleteConsumer(email)
        return self.recv_DeleteConsumer()

    def send_DeleteConsumer(self, email):
        self._oprot.writeMessageBegin('DeleteConsumer', TMessageType.CALL, self._seqid)
        args = DeleteConsumer_args()
        args.email = email
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_DeleteConsumer(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = DeleteConsumer_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "DeleteConsumer failed: unknown result")

    def UpdateConsumerPassword(self, email, newPassword):
        """
        Update previously registered Consumer password.

        @param email
            The Consumer Email of the Consumer which require an update password.
        @return Consumer
            Modified Consumer obejct.

        Parameters:
         - email
         - newPassword

        """
        self.send_UpdateConsumerPassword(email, newPassword)
        return self.recv_UpdateConsumerPassword()

    def send_UpdateConsumerPassword(self, email, newPassword):
        self._oprot.writeMessageBegin('UpdateConsumerPassword', TMessageType.CALL, self._seqid)
        args = UpdateConsumerPassword_args()
        args.email = email
        args.newPassword = newPassword
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UpdateConsumerPassword(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UpdateConsumerPassword_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "UpdateConsumerPassword failed: unknown result")

    def UpdateConsumerImage(self, email, fileName):
        """
        Update previously registered Consumer image.

        @param email
            The Consumer Email of the Consumer which require an update image.
        @return Consumer
            Modified Consumer obejct.

        Parameters:
         - email
         - fileName

        """
        self.send_UpdateConsumerImage(email, fileName)
        return self.recv_UpdateConsumerImage()

    def send_UpdateConsumerImage(self, email, fileName):
        self._oprot.writeMessageBegin('UpdateConsumerImage', TMessageType.CALL, self._seqid)
        args = UpdateConsumerImage_args()
        args.email = email
        args.fileName = fileName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_UpdateConsumerImage(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = UpdateConsumerImage_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorNotFoundE is not None:
            raise result.sErrorNotFoundE
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        if result.sErrorInvalidRequestE is not None:
            raise result.sErrorInvalidRequestE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "UpdateConsumerImage failed: unknown result")

    def LoginConsumer(self, email, password):
        """
        Allows the login of a consumer

        @param email
            The Consumer email
        @param password
            The Email password of the consumer
        @return Consumer
            Consumer object

        Parameters:
         - email
         - password

        """
        self.send_LoginConsumer(email, password)
        return self.recv_LoginConsumer()

    def send_LoginConsumer(self, email, password):
        self._oprot.writeMessageBegin('LoginConsumer', TMessageType.CALL, self._seqid)
        args = LoginConsumer_args()
        args.email = email
        args.password = password
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_LoginConsumer(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = LoginConsumer_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorUserE is not None:
            raise result.sErrorUserE
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "LoginConsumer failed: unknown result")

    def AddImageToMedia(self, fileName, image):
        """
        Add image file binary

        @param binary image
            The binary number that will be keep.
        @return bool
            true or false.

        Parameters:
         - fileName
         - image

        """
        self.send_AddImageToMedia(fileName, image)
        return self.recv_AddImageToMedia()

    def send_AddImageToMedia(self, fileName, image):
        self._oprot.writeMessageBegin('AddImageToMedia', TMessageType.CALL, self._seqid)
        args = AddImageToMedia_args()
        args.fileName = fileName
        args.image = image
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_AddImageToMedia(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = AddImageToMedia_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "AddImageToMedia failed: unknown result")

    def GetImageToMedia(self, fileName):
        """
        Get image file binary

        @param binary image
            The binary number that will be keep.
        @return binary
            binary image.

        Parameters:
         - fileName

        """
        self.send_GetImageToMedia(fileName)
        return self.recv_GetImageToMedia()

    def send_GetImageToMedia(self, fileName):
        self._oprot.writeMessageBegin('GetImageToMedia', TMessageType.CALL, self._seqid)
        args = GetImageToMedia_args()
        args.fileName = fileName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_GetImageToMedia(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = GetImageToMedia_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "GetImageToMedia failed: unknown result")

    def DeleteImageToMedia(self, fileName):
        """
        Delete image file binary

        @param fileName
            The fileName of file that will be delete.
        @return bool
            True or False

        Parameters:
         - fileName

        """
        self.send_DeleteImageToMedia(fileName)
        return self.recv_DeleteImageToMedia()

    def send_DeleteImageToMedia(self, fileName):
        self._oprot.writeMessageBegin('DeleteImageToMedia', TMessageType.CALL, self._seqid)
        args = DeleteImageToMedia_args()
        args.fileName = fileName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_DeleteImageToMedia(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = DeleteImageToMedia_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.sErrorSystemE is not None:
            raise result.sErrorSystemE
        raise TApplicationException(TApplicationException.MISSING_RESULT, "DeleteImageToMedia failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["GetConsumerById"] = Processor.process_GetConsumerById
self._processMap["GetConsumerByEmail"] = Processor.process_GetConsumerByEmail
self._processMap["GetConsumerByEmailPassword"] = Processor.process_GetConsumerByEmailPassword
self._processMap["AddConsumer"] = Processor.process_AddConsumer
self._processMap["DeleteConsumer"] = Processor.process_DeleteConsumer
self._processMap["UpdateConsumerPassword"] = Processor.process_UpdateConsumerPassword
self._processMap["UpdateConsumerImage"] = Processor.process_UpdateConsumerImage
self._processMap["LoginConsumer"] = Processor.process_LoginConsumer
self._processMap["AddImageToMedia"] = Processor.process_AddImageToMedia
self._processMap["GetImageToMedia"] = Processor.process_GetImageToMedia
self._processMap["DeleteImageToMedia"] = Processor.process_DeleteImageToMedia
self._on_message_begin = None
def on_message_begin(self, func):
self._on_message_begin = func
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if self._on_message_begin:
self._on_message_begin(name, type, seqid)
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_GetConsumerById(self, seqid, iprot, oprot):
args = GetConsumerById_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetConsumerById_result()
try:
result.success = self._handler.GetConsumerById(args.idConsumer)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except SpotifakeManagement.ttypes.SErrorUserException as sErrorUserE:
msg_type = TMessageType.REPLY
result.sErrorUserE = sErrorUserE
except SpotifakeManagement.ttypes.SErrorNotFoundException as sErrorNotFoundE:
msg_type = TMessageType.REPLY
result.sErrorNotFoundE = sErrorNotFoundE
except SpotifakeManagement.ttypes.SErrorInvalidRequestException as sErrorInvalidRequestE:
msg_type = TMessageType.REPLY
result.sErrorInvalidRequestE = sErrorInvalidRequestE
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("GetConsumerById", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetConsumerByEmail(self, seqid, iprot, oprot):
args = GetConsumerByEmail_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetConsumerByEmail_result()
try:
result.success = self._handler.GetConsumerByEmail(args.email)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except SpotifakeManagement.ttypes.SErrorUserException as sErrorUserE:
msg_type = TMessageType.REPLY
result.sErrorUserE = sErrorUserE
except SpotifakeManagement.ttypes.SErrorNotFoundException as sErrorNotFoundE:
msg_type = TMessageType.REPLY
result.sErrorNotFoundE = sErrorNotFoundE
except SpotifakeManagement.ttypes.SErrorInvalidRequestException as sErrorInvalidRequestE:
msg_type = TMessageType.REPLY
result.sErrorInvalidRequestE = sErrorInvalidRequestE
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("GetConsumerByEmail", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetConsumerByEmailPassword(self, seqid, iprot, oprot):
args = GetConsumerByEmailPassword_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetConsumerByEmailPassword_result()
try:
result.success = self._handler.GetConsumerByEmailPassword(args.email, args.password)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except SpotifakeManagement.ttypes.SErrorUserException as sErrorUserE:
msg_type = TMessageType.REPLY
result.sErrorUserE = sErrorUserE
except SpotifakeManagement.ttypes.SErrorNotFoundException as sErrorNotFoundE:
msg_type = TMessageType.REPLY
result.sErrorNotFoundE = sErrorNotFoundE
except SpotifakeManagement.ttypes.SErrorInvalidRequestException as sErrorInvalidRequestE:
msg_type = TMessageType.REPLY
result.sErrorInvalidRequestE = sErrorInvalidRequestE
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("GetConsumerByEmailPassword", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_AddConsumer(self, seqid, iprot, oprot):
args = AddConsumer_args()
args.read(iprot)
iprot.readMessageEnd()
result = AddConsumer_result()
try:
result.success = self._handler.AddConsumer(args.newConsumer)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except SpotifakeManagement.ttypes.SErrorUserException as sErrorUserE:
msg_type = TMessageType.REPLY
result.sErrorUserE = sErrorUserE
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("AddConsumer", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_DeleteConsumer(self, seqid, iprot, oprot):
args = DeleteConsumer_args()
args.read(iprot)
iprot.readMessageEnd()
result = DeleteConsumer_result()
try:
result.success = self._handler.DeleteConsumer(args.email)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except SpotifakeManagement.ttypes.SErrorNotFoundException as sErrorNotFoundE:
msg_type = TMessageType.REPLY
result.sErrorNotFoundE = sErrorNotFoundE
except SpotifakeManagement.ttypes.SErrorSystemException as sErrorSystemE:
msg_type = TMessageType.REPLY
result.sErrorSystemE = sErrorSystemE
except SpotifakeManagement.ttypes.SErrorInvalidRequestException as sErrorInvalidRequestE:
msg_type = TMessageType.REPLY
result.sErrorInvalidRequestE = sErrorInvalidRequestE
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("DeleteConsumer", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_UpdateConsumerPassword(self, seqid, iprot, oprot):
    """Service one UpdateConsumerPassword RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = UpdateConsumerPassword_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = UpdateConsumerPassword_result()
    try:
        rpc_result.success = self._handler.UpdateConsumerPassword(rpc_args.email, rpc_args.newPassword)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorUserException as err:
        # Declared IDL exceptions: travel back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorUserE = err
    except SpotifakeManagement.ttypes.SErrorNotFoundException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorNotFoundE = err
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except SpotifakeManagement.ttypes.SErrorInvalidRequestException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorInvalidRequestE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("UpdateConsumerPassword", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_UpdateConsumerImage(self, seqid, iprot, oprot):
    """Service one UpdateConsumerImage RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = UpdateConsumerImage_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = UpdateConsumerImage_result()
    try:
        rpc_result.success = self._handler.UpdateConsumerImage(rpc_args.email, rpc_args.fileName)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorUserException as err:
        # Declared IDL exceptions: travel back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorUserE = err
    except SpotifakeManagement.ttypes.SErrorNotFoundException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorNotFoundE = err
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except SpotifakeManagement.ttypes.SErrorInvalidRequestException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorInvalidRequestE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("UpdateConsumerImage", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_LoginConsumer(self, seqid, iprot, oprot):
    """Service one LoginConsumer RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = LoginConsumer_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = LoginConsumer_result()
    try:
        rpc_result.success = self._handler.LoginConsumer(rpc_args.email, rpc_args.password)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorUserException as err:
        # Declared IDL exceptions: travel back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorUserE = err
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("LoginConsumer", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_AddImageToMedia(self, seqid, iprot, oprot):
    """Service one AddImageToMedia RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = AddImageToMedia_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = AddImageToMedia_result()
    try:
        rpc_result.success = self._handler.AddImageToMedia(rpc_args.fileName, rpc_args.image)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        # Declared IDL exception: travels back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("AddImageToMedia", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_GetImageToMedia(self, seqid, iprot, oprot):
    """Service one GetImageToMedia RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = GetImageToMedia_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = GetImageToMedia_result()
    try:
        rpc_result.success = self._handler.GetImageToMedia(rpc_args.fileName)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        # Declared IDL exception: travels back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("GetImageToMedia", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_DeleteImageToMedia(self, seqid, iprot, oprot):
    """Service one DeleteImageToMedia RPC.

    Decodes the call arguments from *iprot*, invokes the user handler,
    and serializes either the result struct or a mapped exception back
    to the client on *oprot* under the original sequence id.
    """
    rpc_args = DeleteImageToMedia_args()
    rpc_args.read(iprot)
    iprot.readMessageEnd()
    rpc_result = DeleteImageToMedia_result()
    try:
        rpc_result.success = self._handler.DeleteImageToMedia(rpc_args.fileName)
        out_msg_type = TMessageType.REPLY
    except TTransport.TTransportException:
        # A broken transport cannot carry a reply; let the server loop handle it.
        raise
    except SpotifakeManagement.ttypes.SErrorSystemException as err:
        # Declared IDL exception: travels back inside a normal REPLY message.
        out_msg_type = TMessageType.REPLY
        rpc_result.sErrorSystemE = err
    except TApplicationException as err:
        logging.exception('TApplication exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = err
    except Exception:
        # Undeclared failure: report a generic internal error, never the details.
        logging.exception('Unexpected exception in handler')
        out_msg_type = TMessageType.EXCEPTION
        rpc_result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("DeleteImageToMedia", out_msg_type, seqid)
    rpc_result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class GetConsumerById_args(object):
    """
    Thrift-generated argument container for the GetConsumerById RPC.

    Attributes:
     - idConsumer: 16-bit consumer id (wire field 1)
    """

    def __init__(self, idConsumer=None,):
        self.idConsumer = idConsumer

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder, usable only with a CReadableTransport
        # and a populated thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I16:
                    self.idConsumer = iprot.readI16()
                else:
                    # Wire type does not match the schema: discard the value.
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        # Fast path: C-accelerated encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerById_args')
        if self.idConsumer is not None:
            oprot.writeFieldBegin('idConsumer', TType.I16, 1)
            oprot.writeI16(self.idConsumer)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerById_args)
GetConsumerById_args.thrift_spec = (
    None,  # 0
    (1, TType.I16, 'idConsumer', None, None, ),  # 1
)
class GetConsumerById_result(object):
    """
    Thrift-generated result container for the GetConsumerById RPC.

    Attributes:
     - success: Consumer struct on success (wire field 0)
     - sErrorUserE: declared service exception (wire field 1)
     - sErrorNotFoundE: declared service exception (wire field 2)
     - sErrorInvalidRequestE: declared service exception (wire field 3)
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorNotFoundE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SpotifakeManagement.ttypes.Consumer()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerById_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 2)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 3)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerById_result)
GetConsumerById_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [SpotifakeManagement.ttypes.Consumer, None], None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 3
)
class GetConsumerByEmail_args(object):
    """
    Thrift-generated argument container for the GetConsumerByEmail RPC.

    Attributes:
     - email: consumer email address, UTF-8 string (wire field 1)
    """

    def __init__(self, email=None,):
        self.email = email

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Py2 reads raw bytes and must decode; Py3 already yields str.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerByEmail_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerByEmail_args)
GetConsumerByEmail_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
)
class GetConsumerByEmail_result(object):
    """
    Thrift-generated result container for the GetConsumerByEmail RPC.

    Attributes:
     - success: boolean result (wire field 0)
     - sErrorUserE: declared service exception (wire field 1)
     - sErrorNotFoundE: declared service exception (wire field 2)
     - sErrorInvalidRequestE: declared service exception (wire field 3)
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorNotFoundE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerByEmail_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 2)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 3)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerByEmail_result)
GetConsumerByEmail_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 3
)
class GetConsumerByEmailPassword_args(object):
    """
    Thrift-generated argument container for the GetConsumerByEmailPassword RPC.

    Attributes:
     - email: consumer email address, UTF-8 string (wire field 1)
     - password: consumer password, UTF-8 string (wire field 2)
    """

    def __init__(self, email=None, password=None,):
        self.email = email
        self.password = password

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Py2 reads raw bytes and must decode; Py3 already yields str.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerByEmailPassword_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 2)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerByEmailPassword_args)
GetConsumerByEmailPassword_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'password', 'UTF8', None, ),  # 2
)
class GetConsumerByEmailPassword_result(object):
    """
    Thrift-generated result container for the GetConsumerByEmailPassword RPC.

    Attributes:
     - success: Consumer struct on success (wire field 0)
     - sErrorUserE: declared service exception (wire field 1)
     - sErrorNotFoundE: declared service exception (wire field 2)
     - sErrorInvalidRequestE: declared service exception (wire field 3)
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorNotFoundE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SpotifakeManagement.ttypes.Consumer()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetConsumerByEmailPassword_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 2)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 3)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(GetConsumerByEmailPassword_result)
GetConsumerByEmailPassword_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [SpotifakeManagement.ttypes.Consumer, None], None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 3
)
class AddConsumer_args(object):
    """
    Thrift-generated argument container for the AddConsumer RPC.

    Attributes:
     - newConsumer: Consumer struct to create (wire field 1)
    """

    def __init__(self, newConsumer=None,):
        self.newConsumer = newConsumer

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.newConsumer = SpotifakeManagement.ttypes.Consumer()
                    self.newConsumer.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddConsumer_args')
        if self.newConsumer is not None:
            oprot.writeFieldBegin('newConsumer', TType.STRUCT, 1)
            self.newConsumer.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(AddConsumer_args)
AddConsumer_args.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'newConsumer', [SpotifakeManagement.ttypes.Consumer, None], None, ),  # 1
)
class AddConsumer_result(object):
    """
    Thrift-generated result container for the AddConsumer RPC.

    Attributes:
     - success: 16-bit integer result (wire field 0)
     - sErrorUserE: declared service exception (wire field 1)
    """

    def __init__(self, success=None, sErrorUserE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I16:
                    self.success = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddConsumer_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I16, 0)
            oprot.writeI16(self.success)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(AddConsumer_result)
AddConsumer_result.thrift_spec = (
    (0, TType.I16, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
)
class DeleteConsumer_args(object):
    """
    Thrift-generated argument container for the DeleteConsumer RPC.

    Attributes:
     - email: consumer email address, UTF-8 string (wire field 1)
    """

    def __init__(self, email=None,):
        self.email = email

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Py2 reads raw bytes and must decode; Py3 already yields str.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DeleteConsumer_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(DeleteConsumer_args)
DeleteConsumer_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
)
class DeleteConsumer_result(object):
    """
    Thrift-generated result container for the DeleteConsumer RPC.

    Attributes:
     - success: 16-bit integer result (wire field 0)
     - sErrorNotFoundE: declared service exception (wire field 1)
     - sErrorSystemE: declared service exception (wire field 2)
     - sErrorInvalidRequestE: declared service exception (wire field 3)
    """

    def __init__(self, success=None, sErrorNotFoundE=None, sErrorSystemE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorSystemE = sErrorSystemE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I16:
                    self.success = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DeleteConsumer_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I16, 0)
            oprot.writeI16(self.success)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 1)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 2)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 3)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(DeleteConsumer_result)
DeleteConsumer_result.thrift_spec = (
    (0, TType.I16, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 3
)
class UpdateConsumerPassword_args(object):
    """
    Thrift-generated argument container for the UpdateConsumerPassword RPC.

    Attributes:
     - email: consumer email address, UTF-8 string (wire field 1)
     - newPassword: replacement password, UTF-8 string (wire field 2)
    """

    def __init__(self, email=None, newPassword=None,):
        self.email = email
        self.newPassword = newPassword

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Py2 reads raw bytes and must decode; Py3 already yields str.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.newPassword = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UpdateConsumerPassword_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        if self.newPassword is not None:
            oprot.writeFieldBegin('newPassword', TType.STRING, 2)
            oprot.writeString(self.newPassword.encode('utf-8') if sys.version_info[0] == 2 else self.newPassword)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec for the fast codec paths.
all_structs.append(UpdateConsumerPassword_args)
UpdateConsumerPassword_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'newPassword', 'UTF8', None, ),  # 2
)
class UpdateConsumerPassword_result(object):
    """
    Thrift result struct for UpdateConsumerPassword: carries the bool return
    value or one of the declared service exceptions.

    Attributes:
     - success
     - sErrorUserE
     - sErrorNotFoundE
     - sErrorSystemE
     - sErrorInvalidRequestE
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorNotFoundE=None, sErrorSystemE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorSystemE = sErrorSystemE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UpdateConsumerPassword_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 2)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 3)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 4)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(UpdateConsumerPassword_result)
UpdateConsumerPassword_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 3
    (4, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 4
)
class UpdateConsumerImage_args(object):
    """
    Thrift argument struct for the UpdateConsumerImage service call.

    Attributes:
     - email
     - fileName
    """

    def __init__(self, email=None, fileName=None,):
        self.email = email
        self.fileName = fileName

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.fileName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UpdateConsumerImage_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        if self.fileName is not None:
            oprot.writeFieldBegin('fileName', TType.STRING, 2)
            oprot.writeString(self.fileName.encode('utf-8') if sys.version_info[0] == 2 else self.fileName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(UpdateConsumerImage_args)
UpdateConsumerImage_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'fileName', 'UTF8', None, ),  # 2
)
class UpdateConsumerImage_result(object):
    """
    Thrift result struct for UpdateConsumerImage: carries the bool return
    value or one of the declared service exceptions.

    Attributes:
     - success
     - sErrorUserE
     - sErrorNotFoundE
     - sErrorSystemE
     - sErrorInvalidRequestE
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorNotFoundE=None, sErrorSystemE=None, sErrorInvalidRequestE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorNotFoundE = sErrorNotFoundE
        self.sErrorSystemE = sErrorSystemE
        self.sErrorInvalidRequestE = sErrorInvalidRequestE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorNotFoundE = SpotifakeManagement.ttypes.SErrorNotFoundException()
                    self.sErrorNotFoundE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.sErrorInvalidRequestE = SpotifakeManagement.ttypes.SErrorInvalidRequestException()
                    self.sErrorInvalidRequestE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('UpdateConsumerImage_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorNotFoundE is not None:
            oprot.writeFieldBegin('sErrorNotFoundE', TType.STRUCT, 2)
            self.sErrorNotFoundE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 3)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorInvalidRequestE is not None:
            oprot.writeFieldBegin('sErrorInvalidRequestE', TType.STRUCT, 4)
            self.sErrorInvalidRequestE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(UpdateConsumerImage_result)
UpdateConsumerImage_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorNotFoundE', [SpotifakeManagement.ttypes.SErrorNotFoundException, None], None, ),  # 2
    (3, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 3
    (4, TType.STRUCT, 'sErrorInvalidRequestE', [SpotifakeManagement.ttypes.SErrorInvalidRequestException, None], None, ),  # 4
)
class LoginConsumer_args(object):
    """
    Thrift argument struct for the LoginConsumer service call.

    Attributes:
     - email
     - password
    """

    def __init__(self, email=None, password=None,):
        self.email = email
        self.password = password

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.email = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.password = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LoginConsumer_args')
        if self.email is not None:
            oprot.writeFieldBegin('email', TType.STRING, 1)
            oprot.writeString(self.email.encode('utf-8') if sys.version_info[0] == 2 else self.email)
            oprot.writeFieldEnd()
        if self.password is not None:
            oprot.writeFieldBegin('password', TType.STRING, 2)
            oprot.writeString(self.password.encode('utf-8') if sys.version_info[0] == 2 else self.password)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(LoginConsumer_args)
LoginConsumer_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'email', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'password', 'UTF8', None, ),  # 2
)
class LoginConsumer_result(object):
    """
    Thrift result struct for LoginConsumer: carries the Consumer return
    value or one of the declared service exceptions.

    Attributes:
     - success
     - sErrorUserE
     - sErrorSystemE
    """

    def __init__(self, success=None, sErrorUserE=None, sErrorSystemE=None,):
        self.success = success
        self.sErrorUserE = sErrorUserE
        self.sErrorSystemE = sErrorSystemE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SpotifakeManagement.ttypes.Consumer()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorUserE = SpotifakeManagement.ttypes.SErrorUserException()
                    self.sErrorUserE.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LoginConsumer_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorUserE is not None:
            oprot.writeFieldBegin('sErrorUserE', TType.STRUCT, 1)
            self.sErrorUserE.write(oprot)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 2)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(LoginConsumer_result)
LoginConsumer_result.thrift_spec = (
    (0, TType.STRUCT, 'success', [SpotifakeManagement.ttypes.Consumer, None], None, ),  # 0
    (1, TType.STRUCT, 'sErrorUserE', [SpotifakeManagement.ttypes.SErrorUserException, None], None, ),  # 1
    (2, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 2
)
class AddImageToMedia_args(object):
    """
    Thrift argument struct for the AddImageToMedia service call.

    Attributes:
     - fileName
     - image
    """

    def __init__(self, fileName=None, image=None,):
        self.fileName = fileName
        # image carries raw bytes (declared BINARY in the spec below).
        self.image = image

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.fileName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Binary payload: read raw bytes, no decoding.
                    self.image = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddImageToMedia_args')
        if self.fileName is not None:
            oprot.writeFieldBegin('fileName', TType.STRING, 1)
            oprot.writeString(self.fileName.encode('utf-8') if sys.version_info[0] == 2 else self.fileName)
            oprot.writeFieldEnd()
        if self.image is not None:
            oprot.writeFieldBegin('image', TType.STRING, 2)
            oprot.writeBinary(self.image)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(AddImageToMedia_args)
AddImageToMedia_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'fileName', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'image', 'BINARY', None, ),  # 2
)
class AddImageToMedia_result(object):
    """
    Thrift result struct for AddImageToMedia: carries the bool return
    value or the declared system exception.

    Attributes:
     - success
     - sErrorSystemE
    """

    def __init__(self, success=None, sErrorSystemE=None,):
        self.success = success
        self.sErrorSystemE = sErrorSystemE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AddImageToMedia_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 1)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(AddImageToMedia_result)
AddImageToMedia_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 1
)
class GetImageToMedia_args(object):
    """
    Thrift argument struct for the GetImageToMedia service call.

    Attributes:
     - fileName
    """

    def __init__(self, fileName=None,):
        self.fileName = fileName

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.fileName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetImageToMedia_args')
        if self.fileName is not None:
            oprot.writeFieldBegin('fileName', TType.STRING, 1)
            oprot.writeString(self.fileName.encode('utf-8') if sys.version_info[0] == 2 else self.fileName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(GetImageToMedia_args)
GetImageToMedia_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'fileName', 'UTF8', None, ),  # 1
)
class GetImageToMedia_result(object):
    """
    Thrift result struct for GetImageToMedia: carries the binary image
    payload or the declared system exception.

    Attributes:
     - success
     - sErrorSystemE
    """

    def __init__(self, success=None, sErrorSystemE=None,):
        # success holds raw bytes (declared BINARY in the spec below).
        self.success = success
        self.sErrorSystemE = sErrorSystemE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Binary payload: read raw bytes, no decoding.
                    self.success = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetImageToMedia_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeBinary(self.success)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 1)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(GetImageToMedia_result)
GetImageToMedia_result.thrift_spec = (
    (0, TType.STRING, 'success', 'BINARY', None, ),  # 0
    (1, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 1
)
class DeleteImageToMedia_args(object):
    """
    Thrift argument struct for the DeleteImageToMedia service call.

    Attributes:
     - fileName
    """

    def __init__(self, fileName=None,):
        self.fileName = fileName

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.fileName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DeleteImageToMedia_args')
        if self.fileName is not None:
            oprot.writeFieldBegin('fileName', TType.STRING, 1)
            oprot.writeString(self.fileName.encode('utf-8') if sys.version_info[0] == 2 else self.fileName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(DeleteImageToMedia_args)
DeleteImageToMedia_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'fileName', 'UTF8', None, ),  # 1
)
class DeleteImageToMedia_result(object):
    """
    Thrift result struct for DeleteImageToMedia: carries the bool return
    value or the declared system exception.

    Attributes:
     - success
     - sErrorSystemE
    """

    def __init__(self, success=None, sErrorSystemE=None,):
        self.success = success
        self.sErrorSystemE = sErrorSystemE

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.sErrorSystemE = SpotifakeManagement.ttypes.SErrorSystemException()
                    self.sErrorSystemE.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward/backward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DeleteImageToMedia_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.sErrorSystemE is not None:
            oprot.writeFieldBegin('sErrorSystemE', TType.STRUCT, 1)
            self.sErrorSystemE.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Register the struct and attach its field spec (consumed by fix_spec and the fast codecs).
all_structs.append(DeleteImageToMedia_result)
DeleteImageToMedia_result.thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'sErrorSystemE', [SpotifakeManagement.ttypes.SErrorSystemException, None], None, ),  # 1
)
# Resolve forward references inside every registered thrift_spec, then drop
# the temporary registry from the module namespace.
fix_spec(all_structs)
del all_structs
| 35.8072
| 134
| 0.61183
| 9,986
| 103,447
| 6.15742
| 0.023934
| 0.014556
| 0.0262
| 0.021956
| 0.888027
| 0.866868
| 0.85794
| 0.846588
| 0.843693
| 0.843303
| 0
| 0.004797
| 0.296693
| 103,447
| 2,888
| 135
| 35.819598
| 0.840341
| 0.05134
| 0
| 0.857278
| 1
| 0
| 0.0419
| 0.008328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.100996
| false
| 0.041726
| 0.003793
| 0.031294
| 0.190612
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3f50aa9c4a24c6260aff08599894edf09d65cd4
| 8,244
|
py
|
Python
|
tests/test_installer/test_resolve.py
|
sdispater/poet
|
5a07ee95e546ab6460bde43bf59837120e17dfa5
|
[
"MIT"
] | 367
|
2017-04-01T15:10:04.000Z
|
2021-12-23T18:26:03.000Z
|
tests/test_installer/test_resolve.py
|
sdispater/poet
|
5a07ee95e546ab6460bde43bf59837120e17dfa5
|
[
"MIT"
] | 22
|
2017-04-13T15:39:02.000Z
|
2017-10-05T14:55:26.000Z
|
tests/test_installer/test_resolve.py
|
sdispater/poet
|
5a07ee95e546ab6460bde43bf59837120e17dfa5
|
[
"MIT"
] | 16
|
2017-04-14T08:19:48.000Z
|
2019-07-21T13:34:12.000Z
|
# -*- coding: utf-8 -*-
from pip.req.req_install import InstallRequirement
from poet.installer import Installer
from poet.repositories import PyPiRepository
from poet.package.pip_dependency import PipDependency
# Pinned requirements used as canned resolver output by the tests below.
pendulum_req = InstallRequirement.from_line('pendulum==1.2.0')
pytzdata_req = InstallRequirement.from_line('pytzdata==2017.2')
requests_req = InstallRequirement.from_line('requests==2.13.0')

# Expected sha256 checksums matching the pinned versions above.
pendulum_hashes = [
    'sha256:a97e3ed9557ac0c5c3742f21fa4d852d7a050dd9b1b517e993aebef2dd2eea52',
    'sha256:641140a05f959b37a177866e263f6f53a53b711fae6355336ee832ec1a59da8a'
]

pytzdata_hashes = [
    'sha256:a4d11b8123d00e947fac88508292b9e148da884fc64b884d9da3897a35fa2ab0',
    'sha256:ec36940a8eec0a2ebc66a257a746428f7b4acce24cc000b3cda4805f259a8cd2'
]

requests_hashes = [
    'sha256:66f332ae62593b874a648b10a8cb106bfdacd2c6288ed7dec3713c3a808a6017',
    'sha256:b70696ebd1a5e6b627e7e3ac1365a4bc60aaf3495e843c1e70448966c5224cab'
]
def test_resolve(mocker, command):
    """Resolving two top-level deps also yields the transitive pytzdata with hashes recorded."""
    # Stub out pip-tools' Resolver so no index/network access happens.
    resolve = mocker.patch('piptools.resolver.Resolver.resolve')
    reverse_dependencies = mocker.patch('piptools.resolver.Resolver.reverse_dependencies')
    resolve_hashes = mocker.patch('piptools.resolver.Resolver.resolve_hashes')
    resolve.return_value = [
        pendulum_req,
        pytzdata_req,
        requests_req,
    ]
    # pytzdata is only present as a transitive dependency of pendulum.
    reverse_dependencies.return_value = {
        'pytzdata': set(['pendulum'])
    }
    resolve_hashes.return_value = {
        pendulum_req: set(pendulum_hashes),
        requests_req: set(requests_hashes),
        pytzdata_req: set(pytzdata_hashes),
    }

    installer = Installer(command, PyPiRepository())
    packages = installer._resolve([
        PipDependency('pendulum', '^1.2'),
        PipDependency('requests', '^2.13')
    ])

    pendulum = packages[0]
    pytzdata = packages[1]
    requests = packages[2]

    # Name
    assert 'pendulum' == pendulum['name']
    assert 'pytzdata' == pytzdata['name']
    assert 'requests' == requests['name']

    # Version
    assert '1.2.0' == pendulum['version']
    assert '2017.2' == pytzdata['version']
    assert '2.13.0' == requests['version']

    # Checksum
    assert set(pendulum_hashes) == set(pendulum['checksum'])
    assert set(pytzdata_hashes) == set(pytzdata['checksum'])
    assert set(requests_hashes) == set(requests['checksum'])

    # Category
    assert 'main' == pendulum['category']
    assert 'main' == pytzdata['category']
    assert 'main' == requests['category']

    # Optional
    assert not pendulum['optional']
    assert not pytzdata['optional']
    assert not requests['optional']

    # Python: no marker given, so all packages apply to any Python version.
    assert ['*'] == pendulum['python']
    assert ['*'] == pytzdata['python']
    assert ['*'] == requests['python']
def test_resolve_specific_python(mocker, command):
    """A python marker on a top-level dep is kept on that dep and not propagated elsewhere."""
    # Stub out pip-tools' Resolver so no index/network access happens.
    resolve = mocker.patch('piptools.resolver.Resolver.resolve')
    reverse_dependencies = mocker.patch('piptools.resolver.Resolver.reverse_dependencies')
    resolve_hashes = mocker.patch('piptools.resolver.Resolver.resolve_hashes')
    resolve.return_value = [
        pendulum_req,
        pytzdata_req,
        requests_req,
    ]
    # pytzdata is only present as a transitive dependency of pendulum.
    reverse_dependencies.return_value = {
        'pytzdata': set(['pendulum'])
    }
    resolve_hashes.return_value = {
        pendulum_req: set(pendulum_hashes),
        requests_req: set(requests_hashes),
        pytzdata_req: set(pytzdata_hashes),
    }

    installer = Installer(command, PyPiRepository())
    packages = installer._resolve([
        PipDependency('pendulum', '^1.2'),
        PipDependency('requests', {'version': '^2.13', 'python': '~2.7'})
    ])

    pendulum = packages[0]
    pytzdata = packages[1]
    requests = packages[2]

    # Name
    assert 'pendulum' == pendulum['name']
    assert 'pytzdata' == pytzdata['name']
    assert 'requests' == requests['name']

    # Version
    assert '1.2.0' == pendulum['version']
    assert '2017.2' == pytzdata['version']
    assert '2.13.0' == requests['version']

    # Checksum
    assert set(pendulum_hashes) == set(pendulum['checksum'])
    assert set(pytzdata_hashes) == set(pytzdata['checksum'])
    assert set(requests_hashes) == set(requests['checksum'])

    # Category
    assert 'main' == pendulum['category']
    assert 'main' == pytzdata['category']
    assert 'main' == requests['category']

    # Optional
    assert not pendulum['optional']
    assert not pytzdata['optional']
    assert not requests['optional']

    # Python: only requests carried an explicit marker.
    assert ['*'] == pendulum['python']
    assert ['*'] == pytzdata['python']
    assert ['~2.7'] == requests['python']
def test_resolve_specific_python_parent(mocker, command):
    """A python marker on a parent dep propagates to its transitive dependency (pytzdata)."""
    # Stub out pip-tools' Resolver so no index/network access happens.
    resolve = mocker.patch('piptools.resolver.Resolver.resolve')
    reverse_dependencies = mocker.patch('piptools.resolver.Resolver.reverse_dependencies')
    resolve_hashes = mocker.patch('piptools.resolver.Resolver.resolve_hashes')
    resolve.return_value = [
        pendulum_req,
        pytzdata_req,
        requests_req,
    ]
    # pytzdata is only present as a transitive dependency of pendulum.
    reverse_dependencies.return_value = {
        'pytzdata': set(['pendulum'])
    }
    resolve_hashes.return_value = {
        pendulum_req: set(pendulum_hashes),
        requests_req: set(requests_hashes),
        pytzdata_req: set(pytzdata_hashes),
    }

    installer = Installer(command, PyPiRepository())
    packages = installer._resolve([
        PipDependency('pendulum', {'version': '^1.2', 'python': '~2.7'}),
        PipDependency('requests', '^2.13')
    ])

    pendulum = packages[0]
    pytzdata = packages[1]
    requests = packages[2]

    # Name
    assert 'pendulum' == pendulum['name']
    assert 'pytzdata' == pytzdata['name']
    assert 'requests' == requests['name']

    # Version
    assert '1.2.0' == pendulum['version']
    assert '2017.2' == pytzdata['version']
    assert '2.13.0' == requests['version']

    # Checksum
    assert set(pendulum_hashes) == set(pendulum['checksum'])
    assert set(pytzdata_hashes) == set(pytzdata['checksum'])
    assert set(requests_hashes) == set(requests['checksum'])

    # Category
    assert 'main' == pendulum['category']
    assert 'main' == pytzdata['category']
    assert 'main' == requests['category']

    # Optional
    assert not pendulum['optional']
    assert not pytzdata['optional']
    assert not requests['optional']

    # Python: pytzdata inherits its sole parent's (pendulum) marker.
    assert ['~2.7'] == pendulum['python']
    assert ['~2.7'] == pytzdata['python']
    assert ['*'] == requests['python']
def test_resolve_specific_python_and_wildcard_multiple_parent(mocker, command):
    """A dependency shared by a python-constrained parent and an
    unconstrained parent must keep the wildcard python marker."""
    mocked_resolve = mocker.patch('piptools.resolver.Resolver.resolve')
    mocked_reverse_deps = mocker.patch('piptools.resolver.Resolver.reverse_dependencies')
    mocked_hashes = mocker.patch('piptools.resolver.Resolver.resolve_hashes')
    mocked_resolve.return_value = [pendulum_req, pytzdata_req, requests_req]
    mocked_reverse_deps.return_value = {'pytzdata': {'pendulum', 'requests'}}
    mocked_hashes.return_value = {
        pendulum_req: set(pendulum_hashes),
        requests_req: set(requests_hashes),
        pytzdata_req: set(pytzdata_hashes),
    }

    installer = Installer(command, PyPiRepository())
    packages = installer._resolve([
        PipDependency('pendulum', {'version': '^1.2', 'python': '~2.7'}),
        PipDependency('requests', '^2.13')
    ])

    expectations = [
        # (name, version, hashes, python markers)
        ('pendulum', '1.2.0', pendulum_hashes, ['~2.7']),
        ('pytzdata', '2017.2', pytzdata_hashes, ['*']),
        ('requests', '2.13.0', requests_hashes, ['*']),
    ]
    for package, (name, version, hashes, python) in zip(packages, expectations):
        assert name == package['name']
        assert version == package['version']
        assert set(hashes) == set(package['checksum'])
        assert 'main' == package['category']
        assert not package['optional']
        assert python == package['python']
| 31.109434
| 90
| 0.663998
| 815
| 8,244
| 6.576687
| 0.076074
| 0.032836
| 0.042537
| 0.060448
| 0.842351
| 0.842351
| 0.842351
| 0.834515
| 0.828172
| 0.828172
| 0
| 0.054253
| 0.192868
| 8,244
| 264
| 91
| 31.227273
| 0.751277
| 0.024867
| 0
| 0.795812
| 0
| 0
| 0.238612
| 0.114065
| 0
| 0
| 0
| 0
| 0.376963
| 1
| 0.020942
| false
| 0
| 0.020942
| 0
| 0.041885
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5419cdda54866f54ca46c161b98f2a5a3965bc9a
| 86
|
py
|
Python
|
cryptolytic/start.py
|
KyleHaggin/cryptolytic-ds
|
8ea1901dc8cee5d09a0f8d8d2ec480742f35cf01
|
[
"MIT"
] | null | null | null |
cryptolytic/start.py
|
KyleHaggin/cryptolytic-ds
|
8ea1901dc8cee5d09a0f8d8d2ec480742f35cf01
|
[
"MIT"
] | null | null | null |
cryptolytic/start.py
|
KyleHaggin/cryptolytic-ds
|
8ea1901dc8cee5d09a0f8d8d2ec480742f35cf01
|
[
"MIT"
] | null | null | null |
import json
from dotenv import load_dotenv
def init():
    """Load environment variables from a local .env file (verbose mode)."""
    # verbose=True makes python-dotenv report which .env file it found.
    load_dotenv(verbose=True)
| 14.333333
| 30
| 0.767442
| 13
| 86
| 4.923077
| 0.692308
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 86
| 5
| 31
| 17.2
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5427cb92ae2d68b89b8ed4f0855850068b3c0f78
| 306
|
py
|
Python
|
pysatNASA/instruments/methods/__init__.py
|
landsito/pysatNASA
|
8ebbad6e8447ed656641a4bbeb29e6a41f06bcc8
|
[
"BSD-3-Clause"
] | 8
|
2021-01-02T11:38:00.000Z
|
2022-01-20T16:55:11.000Z
|
pysatNASA/instruments/methods/__init__.py
|
landsito/pysatNASA
|
8ebbad6e8447ed656641a4bbeb29e6a41f06bcc8
|
[
"BSD-3-Clause"
] | 82
|
2020-08-13T22:05:40.000Z
|
2022-03-30T20:12:26.000Z
|
pysatNASA/instruments/methods/__init__.py
|
landsito/pysatNASA
|
8ebbad6e8447ed656641a4bbeb29e6a41f06bcc8
|
[
"BSD-3-Clause"
] | 5
|
2020-09-17T22:47:02.000Z
|
2022-03-30T04:40:47.000Z
|
from pysatNASA.instruments.methods._cdf import CDF # noqa F401
from pysatNASA.instruments.methods import cdaweb # noqa F401
from pysatNASA.instruments.methods import cnofs # noqa F401
from pysatNASA.instruments.methods import de2 # noqa F401
from pysatNASA.instruments.methods import icon # noqa F401
| 51
| 63
| 0.816993
| 41
| 306
| 6.073171
| 0.292683
| 0.261044
| 0.481928
| 0.62249
| 0.722892
| 0.722892
| 0.722892
| 0
| 0
| 0
| 0
| 0.06015
| 0.130719
| 306
| 5
| 64
| 61.2
| 0.87594
| 0.160131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
58293f49fab55564b5d41ac8064ed866a6143bdf
| 249
|
py
|
Python
|
archapp/tests/test_print_formats.py
|
pcdshub/archapp
|
3544d9eb94f56ae2635a0fa73e12ed67bd38a242
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2017-08-03T16:48:45.000Z
|
2017-08-03T16:48:45.000Z
|
archapp/tests/test_print_formats.py
|
slaclab/archapp
|
3544d9eb94f56ae2635a0fa73e12ed67bd38a242
|
[
"BSD-3-Clause-LBNL"
] | 7
|
2020-02-15T03:31:09.000Z
|
2021-12-01T21:32:23.000Z
|
archapp/tests/test_print_formats.py
|
slaclab/archapp
|
3544d9eb94f56ae2635a0fa73e12ed67bd38a242
|
[
"BSD-3-Clause-LBNL"
] | 5
|
2017-07-26T15:00:00.000Z
|
2018-05-23T21:09:52.000Z
|
import unittest
from archapp import print_formats
def test_print_list_no_crash():
    """Smoke test: list_print must not raise for empty, single-element,
    and long list inputs (printing suppressed)."""
    for sample in ([], ["text"], ["text"] * 50):
        print_formats.list_print(sample, do_print=False)
| 24.9
| 59
| 0.759036
| 37
| 249
| 4.72973
| 0.405405
| 0.274286
| 0.274286
| 0.36
| 0.422857
| 0.422857
| 0.422857
| 0.422857
| 0
| 0
| 0
| 0.009174
| 0.124498
| 249
| 9
| 60
| 27.666667
| 0.793578
| 0
| 0
| 0
| 0
| 0
| 0.032129
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0.833333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
5888003b6372e33cb64c161ece8f98627dbdfe43
| 254
|
py
|
Python
|
nmigen_boards/qmtech_xc7a35t.py
|
hansfbaier/amaranth-boards
|
a3e92db69e74cc18a42808f6f72068f05efe018e
|
[
"BSD-2-Clause"
] | 1
|
2022-01-22T20:23:07.000Z
|
2022-01-22T20:23:07.000Z
|
nmigen_boards/qmtech_xc7a35t.py
|
amaranth-community-unofficial/amaranth-boards
|
eacb18700d0ed97f525737ca80d923ebd5851505
|
[
"BSD-2-Clause"
] | null | null | null |
nmigen_boards/qmtech_xc7a35t.py
|
amaranth-community-unofficial/amaranth-boards
|
eacb18700d0ed97f525737ca80d923ebd5851505
|
[
"BSD-2-Clause"
] | null | null | null |
# Backwards-compatibility shim: the package was renamed from ``nmigen_boards``
# to ``amaranth_boards``.  Re-export everything from the new module path and
# warn importers that this legacy path is deprecated.
from amaranth_boards.qmtech_xc7a35t import *
from amaranth_boards.qmtech_xc7a35t import __all__

import warnings

# stacklevel=2 attributes the warning to the module importing this shim.
warnings.warn("instead of nmigen_boards.qmtech_xc7a35t, use amaranth_boards.qmtech_xc7a35t",
              DeprecationWarning, stacklevel=2)
| 36.285714
| 92
| 0.818898
| 31
| 254
| 6.322581
| 0.516129
| 0.244898
| 0.387755
| 0.413265
| 0.377551
| 0.377551
| 0
| 0
| 0
| 0
| 0
| 0.058559
| 0.125984
| 254
| 7
| 93
| 36.285714
| 0.824324
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.231373
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
589abf622c4cca03db8939639d34cbd6d34b4cd3
| 22,041
|
py
|
Python
|
quinoa/_kern.py
|
tsilifis/quinoa
|
cc01e942e0453ad4ec21da6223731745ec543371
|
[
"MIT"
] | 1
|
2020-01-31T02:42:20.000Z
|
2020-01-31T02:42:20.000Z
|
quinoa/_kern.py
|
tsilifis/quinoa
|
cc01e942e0453ad4ec21da6223731745ec543371
|
[
"MIT"
] | null | null | null |
quinoa/_kern.py
|
tsilifis/quinoa
|
cc01e942e0453ad4ec21da6223731745ec543371
|
[
"MIT"
] | null | null | null |
"""
A class for covariance kernel functions.
Author: Panagiotis Tsilifis
Date: 09/10/2017
"""
__all__ = ['Kernel', 'RBF', 'Exponential', 'DifferentialKernel', 'DifferentialKernel_1D']
import numpy as np
from scipy import misc
class Kernel(object):
    """
    Base class for covariance kernels.

    Subclasses must implement ``cov`` (full covariance matrix between two
    sets of inputs) and ``cov_diag`` (its diagonal).
    """

    # Dimensionality of the kernel's input space (positive int).
    _input_dim = None
    # Human-readable name of the kernel.
    _name = None

    @property
    def input_dim(self):
        """Dimensionality of the input space."""
        return self._input_dim

    @input_dim.setter
    def input_dim(self, value):
        # BUG FIX: the original asserted on an undefined name ``dim``,
        # raising NameError whenever the setter was used.  Validate the
        # actual ``value`` being assigned instead.
        assert isinstance(value, int)
        assert value > 0
        self._input_dim = value

    def __init__(self, input_dim, name = 'Kernel'):
        """
        Initializes the object.

        :param input_dim: positive int, dimensionality of the input space.
        :param name: descriptive name of the kernel.
        """
        assert isinstance(input_dim, int)
        assert input_dim > 0
        self._input_dim = input_dim
        self._name = name

    def cov(self, X, X2 = None):
        """Covariance matrix between ``X`` and ``X2`` (or ``X`` with itself).

        Abstract: subclasses must override.
        """
        raise NotImplementedError

    def cov_diag(self, X):
        """Diagonal of the covariance matrix of ``X``.

        Abstract: subclasses must override.
        """
        raise NotImplementedError
class RBF(Kernel):
    """
    A class of Kernel type representing the squared exponential covariance kernel.

    k(x, y) = var * exp(-0.5 * sum_i ((x[i] - y[i]) / lengthscale[i]) ** 2)

    Supports an isotropic variant (a single correlation length shared by all
    input dimensions) and an anisotropic variant (one correlation length per
    dimension), and provides analytic derivatives of the covariance with
    respect to the hyperparameters and the inputs up to fourth order.
    """

    # Signal variance (positive scalar).
    _var = None
    # Correlation lengths, stored as a list of length input_dim.
    _lengthscale = None
    # True when a single correlation length is shared across dimensions.
    _iso = None
    # Number of free hyperparameters: 2 when isotropic, input_dim + 1 otherwise.
    _n_params = None

    @property
    def var(self):
        """Signal variance."""
        return self._var

    @var.setter
    def var(self, value):
        assert value > 0.
        self._var = value

    @property
    def lengthscale(self):
        """Correlation lengths (list, one entry per input dimension)."""
        return self._lengthscale

    @lengthscale.setter
    def lengthscale(self, value):
        # NOTE(review): this compares ``value`` as a scalar, while the
        # attribute is normally a list -- confirm intended setter usage.
        assert value > 0.
        self._lengthscale = value

    def __init__(self, input_dim, variance = 1., corr_length = 1., name = 'rbf', iso = True):
        """
        Initializing the object.

        :param input_dim: positive int, dimensionality of the input space.
        :param variance: positive scalar signal variance.
        :param corr_length: either a positive scalar (isotropic kernel) or a
            list of ``input_dim`` positive correlation lengths (anisotropic).
        :param name: descriptive kernel name.
        :param iso: unused; isotropy is inferred from the type of ``corr_length``.
        """
        super(RBF, self).__init__(input_dim, name)
        assert variance > 0
        if isinstance(corr_length, list):
            # One correlation length per dimension -> anisotropic kernel.
            assert len(corr_length) == input_dim
            self._iso = False
            for i in range(input_dim):
                assert corr_length[i] > 0
            self._lengthscale = corr_length
        else:
            # Scalar correlation length replicated across all dimensions.
            assert corr_length > 0
            self._lengthscale = [corr_length] * input_dim
            self._iso = True
        self._var = variance
        if self._iso:
            self._n_params = 2
        else:
            self._n_params = self._input_dim + 1

    def eval(self, x, y):
        """Evaluate the kernel at a single pair of points ``x`` and ``y``."""
        if self._input_dim == 1:
            diff = (x - y) / self._lengthscale[0]
            return self._var * np.exp( - np.square(diff)/ 2.)
        else:
            diff = np.array([(x[i] - y[i]) / self._lengthscale[i] for i in range(self._input_dim)])
            return self._var * np.exp( - np.sum(np.square(diff))/ 2. )

    def cov(self, X, Y = None):
        """
        Covariance matrix between the rows of ``X`` and ``Y``.

        When ``Y`` is None the covariance of ``X`` with itself is returned.
        Both inputs are 2d arrays with ``input_dim`` columns.
        """
        assert X.shape[1] == self._input_dim
        if Y is None:
            # Scaled pairwise differences, shape (input_dim, n, n).
            diff = np.vstack([(X[:,i][:,None] - X[:,i][None,:]).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            return self._var * np.exp( - diff_sq / 2.)
        else:
            assert Y.shape[1] == self._input_dim
            diff = np.vstack([ (X[:,i][:,None] - Y[:,i][None,:]).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] for i in range(self._input_dim)])
            diff_sq = np.sum(np.square(diff), 0)
            return self._var * np.exp( - diff_sq / 2.)

    def d_cov_d_var(self, X, Y = None):
        """Derivative of the covariance with respect to the variance."""
        if Y is None:
            return self.cov(X) / self._var
        else:
            return self.cov(X, Y) / self._var

    def d_cov_d_logvar(self, X, Y = None):
        """Derivative of the covariance with respect to log-variance.

        NOTE(review): returns 2 * cov; confirm the intended parameterization
        (e.g. var = exp(2 * theta)) against callers.
        """
        if Y is None:
            return 2 * self.cov(X)
        else:
            return 2 * self.cov(X, Y)

    def d_cov_d_l(self, X, Y = None):
        """Derivative of the covariance with respect to the correlation length(s)."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            diff = np.vstack([ (X[:,i][:,None] - X[:,i][None,:]).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            if self._iso:
                return self._var * np.exp( - diff_sq / 2.) * np.einsum(1 / np.array(self._lengthscale), [0], np.square(diff), [0,1,2])
            else:
                # Anisotropic case: one derivative slice per input dimension.
                return np.vstack([ ( self._var * np.exp( - diff_sq / 2.) * np.square(diff)[i,:,:] / self._lengthscale[i] ** 3).reshape(1, X.shape[0], X.shape[0]) for i in range(self._input_dim)])
        else:
            diff = np.vstack([ (X[:,i][:,None] - Y[:,i][None,:]).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            # NOTE(review): this branch tests len(_lengthscale) == 1 rather
            # than self._iso as above -- confirm the two are equivalent here.
            if len(self._lengthscale) == 1:
                return self._var * np.exp( - diff_sq / 2.) * np.einsum(1 / np.array(self._lengthscale), [0], np.square(diff), [0,1,2])
            else:
                return np.vstack([ ( self._var * np.exp( - diff_sq / 2.) * np.square(diff)[i,:,:] / self._lengthscale[i] ** 3).reshape(1, X.shape[0], Y.shape[0]) for i in range(self._input_dim)])

    def d_cov_d_X(self, X, Y = None):
        """Derivative of the covariance with respect to the first input ``X``."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            der = np.vstack([ (self.cov(X) * (X[:,i][:,None] - X[:,i][None,:])).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] ** 2 for i in range(X.shape[1]) ])
            return - der
        else:
            assert Y.shape[1] == self._input_dim
            der = np.vstack([ (self.cov(X,Y) * (X[:,i][:,None] - Y[:,i][None,:])).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] ** 2 for i in range(X.shape[1]) ])
            return - der

    def d_cov_d_Y(self, X, Y = None):
        """Derivative of the covariance with respect to the second input ``Y``.

        Equal to ``-d_cov_d_X`` by the symmetry of the difference.
        """
        assert X.shape[1] == self._input_dim
        if Y is None:
            return np.vstack([ (self.cov(X) * (X[:,i][:,None] - X[:,i][None,:])).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] ** 2 for i in range(X.shape[1]) ])
        else:
            assert Y.shape[1] == self._input_dim
            return np.vstack([ (self.cov(X,Y) * (X[:,i][:,None] - Y[:,i][None,:])).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] ** 2 for i in range(X.shape[1]) ])

    def d2_cov_d_XY(self, X, Y = None):
        """Second mixed derivative d2k/(dX dY), shape (d, d, n, m)."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            return np.vstack([ (self.cov(X) * (1 * (i==j) - (X[:,i][:,None] - X[:,i][None,:]) * (X[:,j][:,None] - X[:,j][None,:]) / self._lengthscale[i] ** 2) / self._lengthscale[j] ** 2).reshape(1, X.shape[0], X.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1])]).reshape((X.shape[1], X.shape[1], X.shape[0], X.shape[0]))
        else:
            assert Y.shape[1] == self._input_dim
            return np.vstack([ (self.cov(X, Y) * (1 * (i==j) - (X[:,i][:,None] - Y[:,i][None,:]) * (X[:,j][:,None] - Y[:,j][None,:]) / self._lengthscale[i] ** 2) / self._lengthscale[j] ** 2).reshape(1, X.shape[0], Y.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1])]).reshape((X.shape[1], X.shape[1], X.shape[0], Y.shape[0]))

    def d2_cov_d_XX(self, X, Y = None):
        """Second derivative d2k/dX2; the negation of ``d2_cov_d_XY``."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            return - np.vstack([ (self.cov(X) * (1 * (i==j) - (X[:,i][:,None] - X[:,i][None,:]) * (X[:,j][:,None] - X[:,j][None,:]) / self._lengthscale[i] ** 2) / self._lengthscale[j] ** 2).reshape(1, X.shape[0], X.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1])]).reshape((X.shape[1], X.shape[1], X.shape[0], X.shape[0]))
        else:
            assert Y.shape[1] == self._input_dim
            return - np.vstack([ (self.cov(X, Y) * (1 * (i==j) - (X[:,i][:,None] - Y[:,i][None,:]) * (X[:,j][:,None] - Y[:,j][None,:]) / self._lengthscale[i] ** 2) / self._lengthscale[j] ** 2).reshape(1, X.shape[0], Y.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1])]).reshape((X.shape[1], X.shape[1], X.shape[0], Y.shape[0]))

    def d2_cov_d_YY(self, X, Y = None):
        """Second derivative d2k/dY2; equal to ``-d2_cov_d_XX`` by symmetry."""
        return - self.d2_cov_d_XX(X, Y)

    def d3_cov_d_XXY(self, X, Y = None):
        """Third mixed derivative d3k/(dX2 dY), shape (d, d, d, n, m)."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            return - np.vstack([ (self.cov(X) * ((i==l) * (X[:,j][:,None] - X[:,j][None,:]) / (self._lengthscale[j]*self._lengthscale[l])**2 + (j==l) * (X[:,i][:,None] - X[:,i][None,:]) / (self._lengthscale[i]*self._lengthscale[l])**2 + (i==j) * (X[:,l][:,None] - X[:,l][None,:]) / (self._lengthscale[l]*self._lengthscale[i])**2 ) - (X[:,i][:,None] - X[:,i][None,:]) * (X[:,j][:,None] - X[:,j][None,:]) * (X[:,l][:,None] - X[:,l][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[l])**2 ).reshape(1, X.shape[0], X.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1]) for l in range(X.shape[1]) ]).reshape((X.shape[1], X.shape[1], X.shape[1], X.shape[0], X.shape[0]))
        else:
            assert Y.shape[1] == self._input_dim
            return - np.vstack([ (self.cov(X, Y) * ((i==l) * (X[:,j][:,None] - Y[:,j][None,:]) / (self._lengthscale[j]*self._lengthscale[l])**2 + (j==l) * (X[:,i][:,None] - Y[:,i][None,:]) / (self._lengthscale[i]*self._lengthscale[l])**2 + (i==j) * (X[:,l][:,None] - Y[:,l][None,:]) / (self._lengthscale[l]*self._lengthscale[i])**2 ) - (X[:,i][:,None] - Y[:,i][None,:]) * (X[:,j][:,None] - Y[:,j][None,:]) * (X[:,l][:,None] - Y[:,l][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[l])**2 ).reshape(1, X.shape[0], Y.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1]) for l in range(X.shape[1]) ]).reshape((X.shape[1], X.shape[1], Y.shape[1], X.shape[0], Y.shape[0]))

    def d3_cov_d_YYX(self, X, Y = None):
        """Third mixed derivative d3k/(dY2 dX); delegates to ``d3_cov_d_XXY``."""
        return self.d3_cov_d_XXY(X, Y)

    def d4_cov_d_XXYY(self, X, Y = None):
        """Fourth mixed derivative d4k/(dX2 dY2), shape (d, d, d, d, n, m)."""
        assert X.shape[1] == self._input_dim
        if Y is None:
            return np.vstack([ (self.cov(X) * ( (i==l)*(j==k)/(self._lengthscale[i]*self._lengthscale[k])**2 + (l==j)*(k==i) / (self._lengthscale[i]*self._lengthscale[j])**2 + (i==j)*(k==l) / (self._lengthscale[i]*self._lengthscale[l])**2 - (j==i) * (X[:,l][:,None] - X[:,l][None,:])*(X[:,k][:,None] - X[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[l]*self._lengthscale[k])**2 - (l==j) * (X[:,i][:,None] - X[:,i][None,:]) * (X[:,k][:,None] - X[:,k][None,:]) / (self._lengthscale[l]*self._lengthscale[i]*self._lengthscale[k])**2 - (l==i)*(X[:,j][:,None] - X[:,j][None,:])*(X[:,k][:,None] - X[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[k])**2 - (l==k)* (X[:,i][:,None] - X[:,i][None,:]) * (X[:,j][:,None] - X[:,j][None,:]) / (self._lengthscale[l]*self._lengthscale[k]*self._lengthscale[i])**2 - (k==j)*(X[:,i][:,None] - X[:,i][None,:])*(X[:,l][:,None] - X[:,l][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[l])**2 - (i==k)*(X[:,l][:,None] - X[:,l][None,:]) * (X[:,j][:,None] - X[:,j][None,:]) / (self._lengthscale[i]*self._lengthscale[l]*self._lengthscale[j])**2 + (X[:,i][:,None] - X[:,i][None,:])*(X[:,j][:,None] - X[:,j][None,:])*(X[:,l][:,None]-X[:,l][None,:] )*(X[:,k][:,None] - X[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[k]*self._lengthscale[l])**2 ) ).reshape(1, X.shape[0], X.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1]) for l in range(X.shape[1]) for k in range(X.shape[1]) ]).reshape((X.shape[1], X.shape[1], X.shape[1], X.shape[1], X.shape[0], X.shape[0]))
        else:
            assert Y.shape[1] == self._input_dim
            return np.vstack([ (self.cov(X, Y) * ( (i==l)*(j==k)/(self._lengthscale[i]*self._lengthscale[k])**2 + (l==j)*(k==i) / (self._lengthscale[i]*self._lengthscale[j])**2 + (i==j)*(k==l) / (self._lengthscale[i]*self._lengthscale[l])**2 - (j==i) * (X[:,l][:,None] - Y[:,l][None,:])*(X[:,k][:,None] - Y[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[l]*self._lengthscale[k])**2 - (l==j) * (X[:,i][:,None] - Y[:,i][None,:]) * (X[:,k][:,None] - Y[:,k][None,:]) / (self._lengthscale[l]*self._lengthscale[i]*self._lengthscale[k])**2 - (l==i)*(X[:,j][:,None] - Y[:,j][None,:])*(X[:,k][:,None] - Y[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[k])**2 - (l==k)* (X[:,i][:,None] - Y[:,i][None,:]) * (X[:,j][:,None] - Y[:,j][None,:]) / (self._lengthscale[l]*self._lengthscale[k]*self._lengthscale[i])**2 - (k==j)*(X[:,i][:,None] - Y[:,i][None,:])*(X[:,l][:,None] - Y[:,l][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[l])**2 - (i==k)*(X[:,l][:,None] - Y[:,l][None,:]) * (X[:,j][:,None] - Y[:,j][None,:]) / (self._lengthscale[i]*self._lengthscale[l]*self._lengthscale[j])**2 + (X[:,i][:,None] - Y[:,i][None,:])*(X[:,j][:,None] - Y[:,j][None,:])*(X[:,l][:,None] - Y[:,l][None,:] )*(X[:,k][:,None] - Y[:,k][None,:]) / (self._lengthscale[i]*self._lengthscale[j]*self._lengthscale[k]*self._lengthscale[l])**2 ) ).reshape(1, X.shape[0], Y.shape[0]) for i in range(X.shape[1]) for j in range(X.shape[1]) for l in range(X.shape[1]) for k in range(X.shape[1]) ]).reshape((X.shape[1], X.shape[1], X.shape[1], X.shape[1], X.shape[0], Y.shape[0]))
class Exponential(Kernel):
    """
    A class of Kernel type representing the (gamma-)exponential covariance kernel.

    k(x, y) = var * exp(- r ** gamma),  r = ||(x - y) / lengthscale||_2

    with gamma in (0, 2].  (The original docstring said "squared
    exponential", which is the RBF class above; gamma = 2 recovers it.)
    """

    # Signal variance (positive scalar).
    _var = None
    # Correlation lengths, stored as a list of length input_dim.
    _lengthscale = None
    # True when a single correlation length is shared across dimensions.
    _iso = None
    # Exponent gamma of the kernel, in (0, 2].
    _gamma = None
    # Number of free hyperparameters.
    _n_params = None

    @property
    def var(self):
        """Signal variance."""
        return self._var

    @var.setter
    def var(self, value):
        assert value > 0.
        self._var = value

    @property
    def lengthscale(self):
        """Correlation lengths (list, one entry per input dimension)."""
        return self._lengthscale

    @lengthscale.setter
    def lengthscale(self, value):
        # NOTE(review): compares ``value`` as a scalar, while the attribute
        # is normally a list -- confirm intended setter usage.
        assert value > 0.
        self._lengthscale = value

    @property
    def gamma(self):
        """Kernel exponent; must lie in (0, 2]."""
        return self._gamma

    @gamma.setter
    def gamma(self, value):
        assert value > 0
        assert value <= 2.
        self._gamma = value

    def __init__(self, input_dim, variance = 1., corr_length = 1., name = 'exponential', iso = True, gamma = 1):
        """
        Initializing the object.

        :param input_dim: positive int, dimensionality of the input space.
        :param variance: positive scalar signal variance.
        :param corr_length: positive scalar (isotropic) or list of
            ``input_dim`` positive correlation lengths (anisotropic).
        :param name: descriptive kernel name.
        :param iso: unused; isotropy is inferred from ``corr_length``.
        :param gamma: kernel exponent, nominally in (0, 2].
        """
        super(Exponential, self).__init__(input_dim, name)
        assert variance > 0
        if isinstance(corr_length, list):
            assert len(corr_length) == input_dim
            for i in range(input_dim):
                assert corr_length[i] > 0
            self._lengthscale = corr_length
            self._iso = False
        else:
            assert corr_length > 0
            self._lengthscale = [corr_length] * input_dim
            self._iso = True
        self._var = variance
        self._gamma = gamma
        # NOTE(review): the anisotropic parameter count below is commented
        # out, so _n_params is always 2 -- confirm this is intentional.
        #if self._iso:
        self._n_params = 2
        #else:
        #    self._n_params = self._input_dim + 1

    def cov(self, X, Y = None):
        """
        Covariance matrix between the rows of ``X`` and ``Y``.

        When ``Y`` is None the covariance of ``X`` with itself is returned.
        """
        assert X.shape[1] == self._input_dim
        if Y is None:
            diff = np.vstack([(X[:,i][:,None] - X[:,i][None,:]).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            return self._var * np.exp(- ( np.sqrt(diff_sq) ) ** self._gamma)
        else:
            diff = np.vstack([(X[:,i][:,None] - Y[:,i][None,:]).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            return self._var * np.exp(- ( np.sqrt(diff_sq) ) ** self._gamma)

    def d_cov_d_var(self, X, Y = None):
        """Derivative of the covariance with respect to the variance."""
        if Y is None:
            return self.cov(X) / self._var
        else:
            return self.cov(X, Y) / self._var

    def d_cov_d_logvar(self, X, Y = None):
        """Derivative of the covariance with respect to log-variance.

        NOTE(review): returns 2 * cov; confirm the intended parameterization
        against callers.
        """
        if Y is None:
            return 2 * self.cov(X)
        else:
            return 2 * self.cov(X, Y)

    def d_cov_d_l(self, X, Y = None):
        """Derivative of the covariance with respect to the correlation length.

        NOTE(review): only the isotropic case returns a value; in the
        anisotropic case both branches fall through and implicitly return
        None -- likely an unfinished implementation, confirm before use.
        """
        assert X.shape[1] == self._input_dim
        if Y is None:
            diff = np.vstack([(X[:,i][:,None] - X[:,i][None,:]).reshape(1, X.shape[0], X.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            if self._iso:
                return self._var * np.exp( - (np.sqrt(diff_sq) ) ** self._gamma ) * np.sqrt(diff_sq) ** self._gamma / self._lengthscale[0] ** (self._gamma - 1.)
        else:
            diff = np.vstack([(X[:,i][:,None] - Y[:,i][None,:]).reshape(1, X.shape[0], Y.shape[0]) / self._lengthscale[i] for i in range(X.shape[1])])
            diff_sq = np.sum(np.square(diff), 0)
            if self._iso:
                return self._var * np.exp( - (np.sqrt(diff_sq) ) ** self._gamma ) * np.sqrt(diff_sq) ** self._gamma / self._lengthscale[0] ** (self._gamma - 1.)
class DifferentialKernel(Kernel):
    """
    A class of covariance kernels defined by differential operators.

    The covariance is a linear combination of a base kernel and its input
    derivatives up to second order, each term weighted by a coefficient
    function from ``diff_factors``.
    """

    # Order of the differential operator (0, 1, or 2).
    _order = None
    # List of coefficient functions, one per multi-index term.
    _diff_factors = None
    # Underlying Kernel object whose derivatives are combined.
    _base_kernel = None
    # When True derivatives are taken w.r.t. the first input X,
    # otherwise w.r.t. the second input Y (see ``cov``).
    _active_inp = None

    def __init__(self, input_dim, order, diff_factors = None, base_kernel = None, active_inp_1 = True, name = 'differential kernel'):
        """
        Initializes the object.

        :param input_dim: positive int, dimensionality of the input space.
        :param order: int in {0, 1, 2}, order of the differential operator.
        :param diff_factors: optional list of coefficient functions, one per
            multi-index term of ``mi_terms(order, input_dim)``; defaults to
            the constant-one factor for every term.
        :param base_kernel: the Kernel instance being differentiated.
        :param active_inp_1: True to differentiate w.r.t. the first input.
        :param name: descriptive kernel name.
        """
        super(DifferentialKernel, self).__init__(input_dim, name)
        assert isinstance(order, int)
        assert order > -1 and order < 3, 'Only up to 2nd order differential operators are currently supported.'
        self._order = order
        if base_kernel is not None:
            assert isinstance(base_kernel, Kernel), 'Covariance kernel must be a Kernel object.'
            self._base_kernel = base_kernel
        if diff_factors is None:
            # One constant factor per multi-index term of the operator.
            self._diff_factors = [self.const_factor] * self.mi_terms(order, input_dim).shape[0]
        else:
            assert isinstance(diff_factors, list)
            assert len(diff_factors) == self.mi_terms(order, input_dim).shape[0]
            self._diff_factors = diff_factors
        self._active_inp = active_inp_1

    def mi_terms(self, order = None, dim = None):
        """ matrix of basis terms

        Builds the multi-index matrix enumerating all derivative terms up
        to the given order in the given dimension (graded ordering).

        Input
        :order: PCE order
        :dim: PCE dimension
        """
        if order is None:
            order = self._order
        if dim is None:
            dim = self._input_dim
        if order == 0:
            return np.array(np.zeros(dim, dtype = int), dtype = int)
        else:
            # NOTE(review): scipy.misc.comb was removed in modern SciPy
            # (use scipy.special.comb) -- confirm the pinned scipy version.
            q_num = [int(misc.comb(dim+i-1, i)) for i in range(order+1)]
            mul_ind = np.array(np.zeros(dim, dtype = int), dtype = int)
            mul_ind = np.vstack([mul_ind, np.eye(dim, dtype = int)])
            I = np.eye(dim, dtype = int)
            ind = [1] * dim
            for j in range(1,order):
                ind_new = []
                for i in range(dim):
                    a0 = np.copy(I[int(np.sum(ind[:i])):,:])
                    a0[:,i] += 1
                    mul_ind = np.vstack([mul_ind, a0])
                    ind_new += [a0.shape[0]]
                ind = ind_new
                I = np.copy(mul_ind[np.sum(q_num[:j+1]):,:])
            return mul_ind

    def const_factor(self, X):
        """Default coefficient function: constant one for every row of X."""
        return np.ones(X.shape[0])

    def set_diff_factor(self, factor, index):
        """Replace the coefficient function at position ``index``."""
        assert isinstance(index, int)
        assert index > -1 and index < len(self._diff_factors)
        self._diff_factors[index] = factor

    def cov(self, X, Y = None):
        """
        Covariance of the differential operator applied to the base kernel.

        Combines the base covariance with its first/second derivatives
        (w.r.t. X when ``_active_inp`` is True, w.r.t. Y otherwise),
        each weighted by the corresponding coefficient function of X.
        """
        assert X.shape[1] == self._input_dim
        if Y is not None:
            assert Y.shape[1] == self._input_dim
        #cov = np.zeros((X.shape[0],X.shape[0]))
        if self._active_inp:
            if self._order == 0:
                C = self._base_kernel.cov(X, Y)
                return (self._diff_factors[0](X) * C.T).T
            elif self._order == 1:
                C = self._base_kernel.cov(X, Y)
                dC = self._base_kernel.d_cov_d_X(X, Y)
                return (self._diff_factors[0](X) * C.T).T + np.array([(self._diff_factors[i+1](X) * dC[i,:,:].T).T for i in range(self._input_dim)]).sum(axis = 0)
            else:
                C = self._base_kernel.cov(X, Y)
                dC = self._base_kernel.d_cov_d_X(X, Y)
                d2C = self._base_kernel.d2_cov_d_XX(X, Y)
                #K = [C, dC, d2C] # Shapes are (n x n), (d x n x n), (d x d x n x n)
                return (self._diff_factors[0](X) * C.T).T + np.array([(self._diff_factors[i+1](X) * dC[i,:,:].T).T for i in range(self._input_dim)]).sum(axis = 0) + np.array([(self._diff_factors[i*self._input_dim + j - np.sum(range(i+1))](X) * d2C[i,j,:,:]).T for i in range(self._input_dim) for j in range(i, self._input_dim)]).sum(axis = 0)
        else:
            if self._order == 0:
                C = self._base_kernel.cov(X, Y)
                return (self._diff_factors[0](X) * C.T).T
            elif self._order == 1:
                C = self._base_kernel.cov(X, Y)
                dC = self._base_kernel.d_cov_d_Y(X, Y)
                return (self._diff_factors[0](X) * C.T).T + np.array([(self._diff_factors[i+1](X) * dC[i,:,:].T).T for i in range(self._input_dim)]).sum(axis = 0)
            else:
                C = self._base_kernel.cov(X, Y)
                dC = self._base_kernel.d_cov_d_Y(X, Y)
                d2C = self._base_kernel.d2_cov_d_YY(X, Y)
                #K = [C, dC, d2C] # Shapes are (n x n), (d x n x n), (d x d x n x n)
                return (self._diff_factors[0](X) * C.T).T + np.array([(self._diff_factors[i+1](X) * dC[i,:,:].T).T for i in range(self._input_dim)]).sum(axis = 0) + np.array([(self._diff_factors[i*self._input_dim + j - np.sum(range(i+1))](X) * d2C[i,j,:,:]).T for i in range(self._input_dim) for j in range(i, self._input_dim)]).sum(axis = 0)
class DifferentialKernel_1D(Kernel):
    """
    A class of covariance kernels defined by differential operators.

    One-dimensional variant: the covariance is a fixed 9-term linear
    combination of the base kernel and its input derivatives up to fourth
    mixed order (see ``cov``), each weighted by a coefficient function.
    """

    # Order of the differential operator.
    _order = None
    # List of 9 coefficient functions, one per derivative term in ``cov``.
    _diff_factors = None
    # Underlying Kernel object whose derivatives are combined.
    _base_kernel = None
    # Flag carried over from DifferentialKernel; not read by this class's cov.
    _active_inp = None

    def __init__(self, input_dim, order, diff_factors = None, base_kernel = None, active_inp_1 = True, name = 'differential kernel'):
        """
        Initializes the object.

        :param input_dim: positive int, dimensionality of the input space.
        :param order: int, order of the differential operator.
        :param diff_factors: optional list of coefficient functions; defaults
            to 9 constant-one factors matching the 9 terms used in ``cov``.
        :param base_kernel: the Kernel instance being differentiated.
        :param active_inp_1: stored but unused by this class's ``cov``.
        :param name: descriptive kernel name.
        """
        super(DifferentialKernel_1D, self).__init__(input_dim, name)
        assert isinstance(order, int)
        #assert order > -1 and order < 3, 'Only up to 2nd order differential operators are currently supported.'
        self._order = order
        if base_kernel is not None:
            assert isinstance(base_kernel, Kernel), 'Covariance kernel must be a Kernel object.'
            self._base_kernel = base_kernel
        if diff_factors is None:
            # NOTE(review): ``const_factor`` takes a single argument but
            # ``cov`` calls each factor with (X, Y) -- the defaults would
            # raise TypeError there; confirm callers always pass factors.
            self._diff_factors = [self.const_factor] * 9
        else:
            assert isinstance(diff_factors, list)
            assert len(diff_factors) == self.mi_terms(order, input_dim).shape[0]
            self._diff_factors = diff_factors
        self._active_inp = active_inp_1

    def mi_terms(self, order = None, dim = None):
        """ matrix of basis terms

        Builds the multi-index matrix enumerating all derivative terms up
        to the given order in the given dimension (graded ordering).

        Input
        :order: PCE order
        :dim: PCE dimension
        """
        if order is None:
            order = self._order
        if dim is None:
            dim = self._input_dim
        if order == 0:
            return np.array(np.zeros(dim, dtype = int), dtype = int)
        else:
            # NOTE(review): scipy.misc.comb was removed in modern SciPy
            # (use scipy.special.comb) -- confirm the pinned scipy version.
            q_num = [int(misc.comb(dim+i-1, i)) for i in range(order+1)]
            mul_ind = np.array(np.zeros(dim, dtype = int), dtype = int)
            mul_ind = np.vstack([mul_ind, np.eye(dim, dtype = int)])
            I = np.eye(dim, dtype = int)
            ind = [1] * dim
            for j in range(1,order):
                ind_new = []
                for i in range(dim):
                    a0 = np.copy(I[int(np.sum(ind[:i])):,:])
                    a0[:,i] += 1
                    mul_ind = np.vstack([mul_ind, a0])
                    ind_new += [a0.shape[0]]
                ind = ind_new
                I = np.copy(mul_ind[np.sum(q_num[:j+1]):,:])
            return mul_ind

    def const_factor(self, X):
        """Default coefficient function: constant one for every row of X."""
        return np.ones(X.shape[0])

    def set_diff_factor(self, factor, index):
        """Replace the coefficient function at position ``index``."""
        assert isinstance(index, int)
        assert index > -1 and index < len(self._diff_factors)
        self._diff_factors[index] = factor

    def cov(self, X, Y = None):
        """
        Covariance of the 1d differential operator applied to the base kernel.

        Evaluates the base covariance and its derivatives up to 4th mixed
        order, then returns the 9-term weighted sum with factor i applied to
        term i (factors are called with both inputs (X, Y)).
        """
        assert X.shape[1] == self._input_dim
        if Y is not None:
            assert Y.shape[1] == self._input_dim
        #cov = np.zeros((X.shape[0],X.shape[0]))
        C = self._base_kernel.cov(X,Y)
        dC_1 = self._base_kernel.d_cov_d_X(X, Y)
        dC_2 = self._base_kernel.d_cov_d_Y(X, Y)
        d2C_1 = self._base_kernel.d2_cov_d_XX(X, Y)
        d2C_2 = self._base_kernel.d2_cov_d_YY(X, Y)
        d2C_3 = self._base_kernel.d2_cov_d_XY(X, Y)
        d3C_1 = self._base_kernel.d3_cov_d_XXY(X, Y)
        d3C_2 = self._base_kernel.d3_cov_d_YYX(X, Y)
        d4C = self._base_kernel.d4_cov_d_XXYY(X, Y)
        return self._diff_factors[0](X,Y) * C + self._diff_factors[1](X,Y) * dC_1[0,:,:] + self._diff_factors[2](X,Y) * dC_2[0,:,:] + self._diff_factors[3](X,Y) * d2C_1[0,0,:,:] + self._diff_factors[4](X,Y) * d2C_2[0,0,:,:] + self._diff_factors[5](X,Y) * d2C_3[0,0,:,:] + self._diff_factors[6](X,Y) * d3C_1[0,0,0,:,:] + self._diff_factors[7](X,Y) * d3C_2[0,0,0,:,:] + self._diff_factors[8](X,Y) * d4C[0,0,0,0,:,:]
| 41.508475
| 1,592
| 0.615353
| 3,879
| 22,041
| 3.325342
| 0.042021
| 0.055818
| 0.036359
| 0.0307
| 0.914102
| 0.902706
| 0.885573
| 0.883014
| 0.870688
| 0.856268
| 0
| 0.022832
| 0.15943
| 22,041
| 530
| 1,593
| 41.586792
| 0.673396
| 0.053264
| 0
| 0.764228
| 0
| 0
| 0.012986
| 0.001014
| 0
| 0
| 0
| 0
| 0.140921
| 1
| 0.119241
| false
| 0
| 0.00542
| 0.0271
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
54572bef89ec80b3dd13fa15764dd364decb894e
| 5,432
|
py
|
Python
|
FundamentalAnalysis/financial_statements.py
|
Site-Command/FundamentalAnalysis
|
1841496093729d1e5cf889980f31c2fec0d53aad
|
[
"MIT"
] | null | null | null |
FundamentalAnalysis/financial_statements.py
|
Site-Command/FundamentalAnalysis
|
1841496093729d1e5cf889980f31c2fec0d53aad
|
[
"MIT"
] | null | null | null |
FundamentalAnalysis/financial_statements.py
|
Site-Command/FundamentalAnalysis
|
1841496093729d1e5cf889980f31c2fec0d53aad
|
[
"MIT"
] | null | null | null |
from urllib.request import urlopen
from urllib.error import HTTPError
import json
import pandas as pd
def income_statement(ticker, api_key, period="annual", as_reported=False):
    """
    Description
    ----
    Gives information about the income statement of a company overtime
    which includes i.a. revenue, operating expenses, profit margin and ETBIDA.

    Input
    ----
    ticker (string)
        The company ticker (for example: "GOOGL")
    api_key (string)
        The API Key obtained from https://financialmodelingprep.com/developer/docs/
    period (string)
        Data period, this can be "annual" or "quarter".
    as_reported (boolean)
        Raw data without modifications.

    Output
    ----
    data (dataframe)
        Data with variables in rows and the period in columns.
    """
    # Pick the raw ("as reported") endpoint when requested.
    endpoint = "income-statement-as-reported" if as_reported else "income-statement"
    URL = (f"https://financialmodelingprep.com/api/v3/{endpoint}/{ticker}"
           f"?period={period}&apikey={api_key}")

    try:
        response = urlopen(URL)
        data = json.loads(response.read().decode("utf-8"))
    except HTTPError:
        raise ValueError("This endpoint is only for premium members. Please visit the subscription page to upgrade the "
                         "plan (Starter or higher) at https://financialmodelingprep.com/developer/docs/pricing")

    if 'Error Message' in data:
        raise ValueError(data['Error Message'])

    # Re-key every record by its truncated date: YYYY-MM for quarterly
    # data, YYYY for annual data.
    prefix_length = 7 if period == "quarter" else 4
    data_formatted = {}
    for value in data:
        date = value['date'][:prefix_length]
        del value['date']
        del value['symbol']
        data_formatted[date] = value

    return pd.DataFrame(data_formatted)
def balance_sheet_statement(ticker, api_key, period="annual", as_reported=False):
    """
    Description
    ----
    Gives information about the balance sheet statement of a company overtime
    which includes i.a. total assets, payables, tax liabilities and investments.

    Input
    ----
    ticker (string)
        The company ticker (for example: "RDS-B")
    api_key (string)
        The API Key obtained from https://financialmodelingprep.com/developer/docs/
    period (string)
        Data period, this can be "annual" or "quarter".
    as_reported (boolean)
        Raw data without modifications.

    Output
    ----
    data (dataframe)
        Data with variables in rows and the period in columns.
    """
    # Pick the raw ("as reported") endpoint when requested.
    endpoint = "balance-sheet-statement-as-reported" if as_reported else "balance-sheet-statement"
    URL = (f"https://financialmodelingprep.com/api/v3/{endpoint}/{ticker}"
           f"?period={period}&apikey={api_key}")

    try:
        response = urlopen(URL)
        data = json.loads(response.read().decode("utf-8"))
    except HTTPError:
        raise ValueError("This endpoint is only for premium members. Please visit the subscription page to upgrade the "
                         "plan (Starter or higher) at https://financialmodelingprep.com/developer/docs/pricing")

    if 'Error Message' in data:
        raise ValueError(data['Error Message'])

    # Re-key every record by its truncated date: YYYY-MM for quarterly
    # data, YYYY for annual data.
    prefix_length = 7 if period == "quarter" else 4
    data_formatted = {}
    for value in data:
        date = value['date'][:prefix_length]
        del value['date']
        del value['symbol']
        data_formatted[date] = value

    return pd.DataFrame(data_formatted)
def cash_flow_statement(ticker, api_key, period="annual", as_reported=False):
    """
    Description
    ----
    Gives information about the cash flow statement of a company overtime
    which includes i.a. operating cash flow, dividend payments and capital expenditure.

    Input
    ----
    ticker (string)
        The company ticker (for example: "NKE")
    api_key (string)
        The API Key obtained from https://financialmodelingprep.com/developer/docs/
    period (string)
        Data period, this can be "annual" or "quarter".
    as_reported (boolean)
        Raw data without modifications.

    Output
    ----
    data (dataframe)
        Data with variables in rows and the period in columns.

    Raises
    ----
    ValueError
        When the endpoint is premium-only (HTTP error from the API) or when
        the API payload contains an explicit 'Error Message'.
    """
    endpoint = ("cash-flow-statement-as-reported" if as_reported
                else "cash-flow-statement")
    URL = (f"https://financialmodelingprep.com/api/v3/{endpoint}/{ticker}"
           f"?period={period}&apikey={api_key}")
    try:
        # Context manager guarantees the HTTP connection is closed, even on
        # a JSON decode error (the original left the response open).
        with urlopen(URL) as response:
            data = json.loads(response.read().decode("utf-8"))
    except HTTPError as error:
        # Chain the original HTTPError so the root cause stays visible.
        raise ValueError(
            "This endpoint is only for premium members. Please visit the subscription page to upgrade the "
            "plan (Starter or higher) at https://financialmodelingprep.com/developer/docs/pricing") from error
    if 'Error Message' in data:
        raise ValueError(data['Error Message'])
    # Column labels: YYYY-MM for quarterly data, YYYY for annual data.
    date_length = 7 if period == "quarter" else 4
    data_formatted = {}
    for value in data:
        date = value.pop('date')[:date_length]
        # 'symbol' duplicates the ticker argument; drop it if present
        # (tolerant pop: some as-reported payloads may omit it).
        value.pop('symbol', None)
        data_formatted[date] = value
    return pd.DataFrame(data_formatted)
| 32.921212
| 120
| 0.631996
| 649
| 5,432
| 5.235747
| 0.194145
| 0.026486
| 0.102413
| 0.067098
| 0.910241
| 0.910241
| 0.910241
| 0.910241
| 0.872278
| 0.829017
| 0
| 0.003712
| 0.256075
| 5,432
| 165
| 121
| 32.921212
| 0.837169
| 0.304676
| 0
| 0.833333
| 0
| 0.038462
| 0.387472
| 0.046769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.051282
| 0
| 0.128205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54966ec7c2f770f8166da71a3921776866a62fa7
| 4,380
|
py
|
Python
|
14.04.21list2.py
|
bzhumakova/FirstProject
|
048fe1feccd3795590fc3ca7f1a05372f9607ae9
|
[
"MIT"
] | 2
|
2021-04-17T10:36:38.000Z
|
2021-04-21T15:24:08.000Z
|
14.04.21list2.py
|
zhumakova/FirstProject
|
048fe1feccd3795590fc3ca7f1a05372f9607ae9
|
[
"MIT"
] | null | null | null |
14.04.21list2.py
|
zhumakova/FirstProject
|
048fe1feccd3795590fc3ca7f1a05372f9607ae9
|
[
"MIT"
] | null | null | null |
# my_list=['honda','honda','bmw','mercedes','bugatti']
# print(my_list.index('honda'))
# print(my_list[0])
# print(my_list.count('honda'))
# my_list.sort()
# print(my_list)
# my_list.pop()
# print(my_list)
# my_list=['honda','honda','bmw','mercedes','bugatti']
# copy_my_list=my_list.copy()
# print(copy_my_list)
# list1=[1,2,3]
# list2=list1.copy()
# del list1[0]
# print(list2)
# my_list=['honda','honda','bmw','mercedes','bugatti']
# # my_list.append('lexus')
# # print(my_list)
# my_list=['honda','honda','bmw','mercedes','bugatti']
# my_list.insert(1,'ford')
# print(my_list)
# my_list=['honda','honda','bmw','mercedes','bugatti']
#
# extend_list=['hello','ghetto']
# my_list.extend('Hello')
# print(my_list)
# my_list=['honda','honda','bmw','mercedes','bugatti']
# my_list.remove('honda')
# print(my_list)
# my_list=['honda','honda','bmw','mercedes','bugatti']
# my_list.reverse()
# print(my_list)
#
# my_list=['honda','honda','bmw','mercedes','bugatti']
# numbers=[1,2,3,4,5,6]
# print(my_list[2:])
# print(my_list[:3])
# print(my_list[1:4])
# print(numbers[0:4])
# print(numbers[0:5:2])
# numbers=[1,2,3,4,5,6,7,8,9,10]
# print(numbers[::3])
# numbers=[1,2,3,4,5,6,7,8,9,10]
# print(numbers[::-2])
# data = ['Wt','Ht',342432423424324,5.996,5.77778,'Insurance_History_2',34243242342432124545312312534534534,'Insurance_History_4','Insurance_History_5', 'Insurance_History_7',234242049004328402384023849028402348203,55, 66, 11, 'Medical_Keyword_3','Medical_Keyword_4', 'Medical_Keyword_5', 'Medical_Keyword_6', 34243242342432124545312312534534534534503495345,'lalalalallalalalalalalalalalalala', 23409284028430928420483209482904380428, 'Medical_Keyword_10', 'Medical_Keyword_11',92384923849023849023842903482934324290, 93429423018319238192004829423482942, 'Medical_Keyword_14', 'Medical_Keyword_15','Medical_Keyword_16', 5.888, 'Medical_Keyword_18asfdasfdasfdasfdasdfasdfas','Medicagsfgsfgsfkgjsfkg',9.131, 0.978, 'Famidasdasdlasdlaspdlaspdlasp2948203948', 'Familygsdglksflg2849023840923;fksdkgsd234234234238409238490238','Family_Hist_4','Family_Hist_5', 9.19, 'Medical_History_2', 'Medical_History_3', 'Medical_History_4',13, 'Medical_History_6', 'Medical_History_7', 111, 'Medical_History_9',123.7773, 'Medical_History_41', 55823428882482374824828472348,'Product_Info_3',1111111111111111111111, 'Product_Info_5']
#
# i=0
# while i<len(data):
# obj = data[i]
# if isinstance(obj ,float):
#
# if obj%1>=0.8 or obj%1<=0.2:
# data[i] = round(obj)
# else:
# data[i]=int(obj)
#
# elif isinstance(obj,int):
# str_1=str(obj)
# if len(str_1)>20:
# del data[i]
# i-=1
#
# elif isinstance(data[i],str):
# if len(data[i])>50:
# del data[i]
# i-=1
# i+=1
#
# print(data)
# Raw records: a mixture of column names, long numeric ids and measurements.
data = ['Wt','Ht',342432423424324,5.996,5.77778,'Insurance_History_2',34243242342432124545312312534534534,'Insurance_History_4','Insurance_History_5', 'Insurance_History_7',234242049004328402384023849028402348203,55, 66, 11, 'Medical_Keyword_3','Medical_Keyword_4', 'Medical_Keyword_5', 'Medical_Keyword_6', 34243242342432124545312312534534534534503495345,'lalalalallalalalalalalalalalalala', 23409284028430928420483209482904380428, 'Medical_Keyword_10', 'Medical_Keyword_11',92384923849023849023842903482934324290, 93429423018319238192004829423482942, 'Medical_Keyword_14', 'Medical_Keyword_15','Medical_Keyword_16', 5.888, 'Medical_Keyword_18asfdasfdasfdasfdasdfasdfas','Medicagsfgsfgsfkgjsfkg',9.131, 0.978, 'Famidasdasdlasdlaspdlaspdlasp2948203948', 'Familygsdglksflg2849023840923;fksdkgsd234234234238409238490238','Family_Hist_4','Family_Hist_5', 9.19, 'Medical_History_2', 'Medical_History_3', 'Medical_History_4',13, 'Medical_History_6', 'Medical_History_7', 111, 'Medical_History_9',123.7773, 'Medical_History_41', 55823428882482374824828472348,'Product_Info_3',1111111111111111111111, 'Product_Info_5']

# Normalise the raw records into clear_data:
#   * floats  -> rounded when the fractional part is >= 0.8 or <= 0.2,
#                otherwise truncated to int
#   * ints    -> kept as decimal strings, but only up to 20 digits
#   * strings -> kept as-is, but only up to 50 characters
clear_data = []
for item in data:
    if isinstance(item, float):
        fraction = item % 1
        clear_data.append(round(item) if fraction >= 0.8 or fraction <= 0.2 else int(item))
    elif isinstance(item, int):
        digits = str(item)
        if len(digits) <= 20:
            clear_data.append(digits)
    elif isinstance(item, str) and len(item) <= 50:
        clear_data.append(item)
print(clear_data)
| 40.934579
| 1,113
| 0.71484
| 562
| 4,380
| 5.313167
| 0.169039
| 0.062291
| 0.04789
| 0.042867
| 0.835901
| 0.82351
| 0.82351
| 0.80777
| 0.80777
| 0.794374
| 0
| 0.237432
| 0.114384
| 4,380
| 107
| 1,114
| 40.934579
| 0.532354
| 0.606393
| 0
| 0
| 0
| 0
| 0.370236
| 0.120992
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
54b768fe1068f0c4f2e1101c41ed4f6078bba368
| 379,775
|
py
|
Python
|
angr_platforms/tricore/rrr1_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
angr_platforms/tricore/rrr1_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
angr_platforms/tricore/rrr1_instr.py
|
shahinsba/angr-platforms
|
86f9ea90c396fb5561d0196a2d1a873e573b0294
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python3
""" rrr1_instr.py
Implementation of RRR1 format instructions.
"""
from pyvex.lifting.util import Type, Instruction
from .rtl import * # pylint: disable=[wildcard-import, unused-wildcard-import]
from .logger import log_this
class RRR1_MADD_H_83_1A_Inst(Instruction):
    """ Packed Multiply-Add Q Format instruction:
        op = 0x83
        op2 = 0x1A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.H_83_1A'
    # Fixed opcode bits as bit strings; lower-case letters in bin_format mark
    # the variable operand fields (b, a, n, c, d).
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): the MADDS.H 0x39/0x38 variants widen the condition
        # with extend_to_32_bits() and mask with 0xffffffff; here the mask is
        # only 0xffff — confirm the narrower mask is intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_H_83_19_Inst(Instruction):
    """ Packed Multiply-Add Q Format instruction:
        op = 0x83
        op2 = 0x19
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.H_83_19'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sibling MADDS.H 0x39/0x38 use extend_to_32_bits() and
        # mask 0xffffffff; here the mask is only 0xffff — confirm intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffff)
        e_d_1 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_2 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w1 = e_d_2 + mul_res1
        result_w0 = e_d_1 + mul_res0
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_H_83_18_Inst(Instruction):
    """ Packed Multiply-Add Q Format instruction:
        op = 0x83
        op2 = 0x18
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.H_83_18'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sibling MADDS.H 0x39/0x38 use extend_to_32_bits() and
        # mask 0xffffffff; here the mask is only 0xffff — confirm intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_H_83_1B_Inst(Instruction):
    """ Packed Multiply-Add Q Format instruction:
        op = 0x83
        op2 = 0x1B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.H_83_1B'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sibling MADDS.H 0x39/0x38 use extend_to_32_bits() and
        # mask 0xffffffff; here the mask is only 0xffff — confirm intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_H_83_3A_Inst(Instruction):
    """ Packed Multiply-Add Q Format, Saturated instruction:
        op = 0x83
        op2 = 0x3A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDS.H_83_3A'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sibling MADDS.H 0x39/0x38 use extend_to_32_bits() and
        # mask 0xffffffff; here the mask is only 0xffff — confirm intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        # compute ssov32
        # Each accumulated word is clamped to the signed 32-bit range.
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0 = ssov32(e_d_0 + mul_res0, max_pos, max_neg)
        result_w1 = ssov32(e_d_1 + mul_res1, max_pos, max_neg)
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_H_83_39_Inst(Instruction):
    """ Packed Multiply-Add Q Format, Saturated instruction:
        op = 0x83
        op2 = 0x39
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDS.H_83_39'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 (n == 1) saturation case
        # applies; extend_to_32_bits turns the 1-bit condition into a mask.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Select 0x7FFFFFFF when saturating, otherwise the shifted product.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # compute ssov32
        # Each accumulated word is clamped to the signed 32-bit range.
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_H_83_38_Inst(Instruction):
    """ Packed Multiply-Add Q Format, Saturated instruction:
        op = 0x83
        op2 = 0x38
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDS.H_83_38'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 (n == 1) saturation case
        # applies; extend_to_32_bits turns the 1-bit condition into a mask.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Select 0x7FFFFFFF when saturating, otherwise the shifted product.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # compute ssov32
        # Each accumulated word is clamped to the signed 32-bit range.
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_H_83_3B_Inst(Instruction):
    """ Packed Multiply-Add Q Format, Saturated instruction:
        op = 0x83
        op2 = 0x3B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDS.H_83_3B'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the products.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 case with n == 1, whose shifted
        # product must be replaced by the saturated value 0x7FFFFFFF.
        sc1 = (((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = (((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sibling MADDS.H 0x39/0x38 use extend_to_32_bits() and
        # mask 0xffffffff; here the mask is only 0xffff — confirm intentional.
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 + mul_res0
        result_w1 = e_d_1 + mul_res1
        # compute ssov32
        # Each accumulated word is clamped to the signed 32-bit range.
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_Q_43_02_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x02
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.Q_43_02'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(2)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the product.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        # Accumulator operand D[d].
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # Accumulate D[d] with the shifted product, keeping bits [63:32]
        # of the shifted 64-bit product (the >> 32).
        result = d_d + (((d_a * d_b) << n.value) >> 32)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADD_Q_43_1B_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x1B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.Q_43_1B'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the product.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        result_tmp = (d_a * d_b) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        # Only the low accumulator word takes the product; the upper word of
        # E[d] is passed through unchanged to E[c].
        result_w0 = e_d_0 + result_tmp
        result_w1 = e_d_1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_Q_43_01_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x01
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADD.Q_43_01'
    # Fixed opcode bits; lower-case letters in bin_format mark operand fields.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(1)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode the operand fields (register indices a, b, c, d and shift n)."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        # n is a 2-bit left-shift applied to the product.
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        # Accumulator operand D[d].
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # Accumulate D[d] with the product of D[a] and the lower half-word of
        # D[b], shifted left by n then right by 16.
        result = d_d + (((d_a * (d_b & 0xffff)) << n.value) >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        # SV/SAV are sticky: preserved while no overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADD_Q_43_19_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x19
        User Status Flags: V, SV, AV, SAV

        64-bit accumulate: E[c] = E[d] + ((D[a] * D[b][15:0]) << n);
        only the low word of the accumulator is modified here.
    """
    name = 'RRR1_MADD.Q_43_19'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # product of D[a] and the lower half-word of D[b], scaled left by n
        result_tmp = (d_a * (d_b & 0xffff)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + result_tmp
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # build the 64-bit value used for the flag computation; the words
        # must be widened first: shifting a 32-bit value left by 32 would
        # discard the upper word, so the previous int_32 arithmetic fed
        # only the low word into overflow_64/advanced_overflow_64
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_Q_43_00_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x00
        User Status Flags: V, SV, AV, SAV

        D[c] = D[d] + ((D[a] * D[b][31:16]) << n) >> 16 (32-bit result).
    """
    name = 'RRR1_MADD.Q_43_00'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # accumulate: D[d] + ((D[a] * upper half-word of D[b]) << n) >> 16
        result = d_d + (((d_a * (d_b >> 16)) << n.value) >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADD_Q_43_18_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x18
        User Status Flags: V, SV, AV, SAV

        64-bit accumulate: E[c] = E[d] + ((D[a] * D[b][31:16]) << n);
        only the low word of the accumulator is modified here.
    """
    name = 'RRR1_MADD.Q_43_18'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # product of D[a] and the upper half-word of D[b], scaled left by n
        result_tmp = (d_a * (d_b >> 16)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + result_tmp
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # build the 64-bit value used for the flag computation; the words
        # must be widened first: shifting a 32-bit value left by 32 would
        # discard the upper word, so the previous int_32 arithmetic fed
        # only the low word into overflow_64/advanced_overflow_64
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_Q_43_05_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x05
        User Status Flags: V, SV, AV, SAV

        D[c] = D[d] + ((D[a][15:0] * D[b][15:0]) << n), with the
        0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADD.Q_43_05'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(5)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both lower half-words are 0x8000 and n == 1
        # (the one q15*q15 case whose doubled product overflows)
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # select the saturated constant in the special case, the plain
        # shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask (extend_to_16_bits,
        # ^0xffff) applied to 32-bit values -- confirm the saturation path
        # really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        result = d_d + mul_res
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADD_Q_43_1D_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x1D
        User Status Flags: V, SV, AV, SAV

        64-bit accumulate of the lower half-word product:
        E[c] low word = E[d] low word + (((D[a][15:0] * D[b][15:0]) << n) << 16),
        with the 0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADD.Q_43_1D'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: all-ones mask when both lower half-words are 0x8000 and n == 1
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + (mul_res << 16)
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # build the 64-bit value used for the flag computation; the words
        # must be widened first: shifting a 32-bit value left by 32 would
        # discard the upper word, so the previous int_32 arithmetic fed
        # only the low word into overflow_64/advanced_overflow_64
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADD_Q_43_04_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x04
        User Status Flags: V, SV, AV, SAV

        D[c] = D[d] + ((D[a][31:16] * D[b][31:16]) << n), with the
        0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADD.Q_43_04'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(4)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both upper half-words are 0x8000 and n == 1
        # (the one q15*q15 case whose doubled product overflows)
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        result = d_d + mul_res
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADD_Q_43_1C_Inst(Instruction):
    """ Multiply-Add Q Format instruction:
        op = 0x43
        op2 = 0x1C
        User Status Flags: V, SV, AV, SAV

        64-bit accumulate of the upper half-word product:
        E[c] low word = E[d] low word + (((D[a][31:16] * D[b][31:16]) << n) << 16),
        with the 0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADD.Q_43_1C'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: all-ones mask when both upper half-words are 0x8000 and n == 1
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + (mul_res << 16)
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # build the 64-bit value used for the flag computation; the words
        # must be widened first: shifting a 32-bit value left by 32 would
        # discard the upper word, so the previous int_32 arithmetic fed
        # only the low word into overflow_64/advanced_overflow_64
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_Q_43_22_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x22
        User Status Flags: V, SV, AV, SAV

        D[c] = ssov32(D[d] + ((D[a] * D[b]) << n) >> 32).
    """
    name = 'RRR1_MADDS.Q_43_22'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(2)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # accumulate: D[d] + ((D[a] * D[b]) << n) >> 32
        result1 = d_d + (((d_a * d_b) << n.value) >> 32)
        # compute ssov32: saturate to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        # NOTE(review): V/AV are computed from the already-saturated result,
        # which can never overflow -- confirm whether the unsaturated
        # result1 should be used instead
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDS_Q_43_3B_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x3B
        User Status Flags: V, SV, AV, SAV

        64-bit saturating accumulate: each word of E[d] + ((D[a] * D[b]) << n)
        is saturated independently with ssov32 before being written to E[c].
    """
    name = 'RRR1_MADDS.Q_43_3B'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # full 32x32 product, scaled left by n
        result_tmp = (d_a * d_b) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + result_tmp
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # saturate each word independently to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # build the 64-bit unsaturated value used for the flag computation;
        # the words must be widened first: shifting a 32-bit value left by
        # 32 would discard the upper word, so the previous int_32
        # arithmetic fed only the low word into the 64-bit overflow checks
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_Q_43_21_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x21
        User Status Flags: V, SV, AV, SAV

        D[c] = ssov32(D[d] + ((D[a] * D[b][15:0]) << n) >> 16).
    """
    name = 'RRR1_MADDS.Q_43_21'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(1)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # accumulate: D[d] + ((D[a] * lower half-word of D[b]) << n) >> 16
        result1 = d_d + (((d_a * (d_b & 0xffff)) << n.value) >> 16)
        # compute ssov32: saturate to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        # NOTE(review): V/AV are computed from the already-saturated result,
        # which can never overflow -- confirm whether the unsaturated
        # result1 should be used instead
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDS_Q_43_39_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x39
        User Status Flags: V, SV, AV, SAV

        64-bit saturating accumulate of the lower half-word product:
        each word of E[d] + ((D[a] * D[b][15:0]) << n) is saturated
        independently with ssov32 before being written to E[c].
    """
    name = 'RRR1_MADDS.Q_43_39'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # product of D[a] and the lower half-word of D[b], scaled left by n
        result_tmp = (d_a * (d_b & 0xffff)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + result_tmp
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # saturate each word independently to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # build the 64-bit unsaturated value used for the flag computation;
        # the words must be widened first: shifting a 32-bit value left by
        # 32 would discard the upper word, so the previous int_32
        # arithmetic fed only the low word into the 64-bit overflow checks
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_Q_43_20_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x20
        User Status Flags: V, SV, AV, SAV

        D[c] = ssov32(D[d] + ((D[a] * D[b][31:16]) << n) >> 16).
    """
    name = 'RRR1_MADDS.Q_43_20'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # accumulate: D[d] + ((D[a] * upper half-word of D[b]) << n) >> 16
        result1 = d_d + (((d_a * (d_b >> 16)) << n.value) >> 16)
        # compute ssov32: saturate to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        # NOTE(review): V/AV are computed from the already-saturated result,
        # which can never overflow -- confirm whether the unsaturated
        # result1 should be used instead
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDS_Q_43_38_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x38
        User Status Flags: V, SV, AV, SAV

        64-bit saturating accumulate of the upper half-word product:
        each word of E[d] + ((D[a] * D[b][31:16]) << n) is saturated
        independently with ssov32 before being written to E[c].
    """
    name = 'RRR1_MADDS.Q_43_38'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # product of D[a] and the upper half-word of D[b], scaled left by n
        result_tmp = (d_a * (d_b >> 16)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + result_tmp
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # saturate each word independently to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # build the 64-bit unsaturated value used for the flag computation;
        # the words must be widened first: shifting a 32-bit value left by
        # 32 would discard the upper word, so the previous int_32
        # arithmetic fed only the low word into the 64-bit overflow checks
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_Q_43_25_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x25
        User Status Flags: V, SV, AV, SAV

        D[c] = ssov32(D[d] + ((D[a][15:0] * D[b][15:0]) << n)), with the
        0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADDS.Q_43_25'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(5)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both lower half-words are 0x8000 and n == 1
        # (the one q15*q15 case whose doubled product overflows)
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        result1 = d_d + mul_res
        # compute ssov32: saturate to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        # NOTE(review): V/AV are computed from the already-saturated result,
        # which can never overflow -- confirm whether the unsaturated
        # result1 should be used instead
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDS_Q_43_3D_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x3D
        User Status Flags: V, SV, AV, SAV

        64-bit saturating accumulate of the lower half-word product:
        E[c] low word = ssov32(E[d] low word + (((D[a][15:0] * D[b][15:0]) << n) << 16)),
        with the 0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADDS.Q_43_3D'
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # low word of destination register pair E[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: all-ones mask when both lower half-words are 0x8000 and n == 1
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)    # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][63:32]
        result_w0 = e_d_0 + (mul_res << 16)
        # NOTE(review): a carry out of the low-word addition is not
        # propagated into the upper word -- confirm against the ISA manual
        result_w1 = e_d_1
        # saturate each word independently to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # build the 64-bit unsaturated value used for the flag computation;
        # the words must be widened first: shifting a 32-bit value left by
        # 32 would discard the upper word, so the previous int_32
        # arithmetic fed only the low word into the 64-bit overflow checks
        result = result_w1.cast_to(Type.int_64)
        result <<= 32
        result |= result_w0.cast_to(Type.int_64)
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDS_Q_43_24_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction:
        op = 0x43
        op2 = 0x24
        User Status Flags: V, SV, AV, SAV

        D[c] = ssov32(D[d] + ((D[a][31:16] * D[b][31:16]) << n)), with the
        0x8000 * 0x8000, n == 1 product replaced by the saturated value.
    """
    name = 'RRR1_MADDS.Q_43_24'
    # primary opcode 0x43; op2 is split into a 2-bit and a 4-bit field
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(4)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # decode register indices and the 2-bit shift amount n as plain ints
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # destination register D[c]
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both upper half-words are 0x8000 and n == 1
        # (the one q15*q15 case whose doubled product overflows)
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # saturated constant in the special case, shifted product otherwise
        # NOTE(review): sc looks like a 16-bit mask applied to 32-bit
        # values -- confirm the saturation path really yields 0x7fffffff
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        result1 = d_d + mul_res
        # compute ssov32: saturate to the signed 32-bit range
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        # NOTE(review): V/AV are computed from the already-saturated result,
        # which can never overflow -- confirm whether the unsaturated
        # result1 should be used instead
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # sticky bits: keep previous SV/SAV while no overflow, else force 1
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDS_Q_43_3C_Inst(Instruction):
    """ Multiply-Add Q Format, Saturated instruction (64-bit accumulator).

        MADDS.Q (op = 0x43, op2 = 0x3C):
        E[c] = ssov(E[d] + ((upper half of D[a] * upper half of D[b]) << n) << 16),
        saturated per 32-bit word. Results are written directly to D[c]/D[c+1]
        inside compute_result (no commit_result).
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDS.Q_43_3C'
    # First opcode byte: 0x43.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Second opcode (0x3C) split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name D[c] (low word of E[c]).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Accumulate (A_hi * B_hi) << n into the E[d] register pair, saturated."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation condition: 0x8000 * 0x8000 shifted by n == 1 overflows
        # Q31; the product is forced to 0x7fffffff in that case.
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): sc^0xffff mask vs sc^0xffffffff in sibling classes —
        # confirm intended width.
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        result_w0 = e_d_0 + (mul_res << 16)
        result_w1 = e_d_1
        # compute ssov32 per 32-bit word
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the E[c] pair
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        # NOTE(review): result_w1 is a 32-bit IR value; '<<= 32' presumably
        # relies on widening inside the IR wrapper — confirm.
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDM_H_83_1E_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision instruction.

        MADDM.H (op = 0x83, op2 = 0x1E):
        E[c] = E[d] + ((A_hi * B_lo) + (A_lo * B_lo), each shifted left by n,
        summed and shifted left by 16). Results go straight to D[c]/D[c+1].
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDM.H_83_1E'
    # First opcode byte: 0x83.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Second opcode (0x1E) split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum the two halfword products, accumulate into E[d], write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: saturation masks for the 0x8000 * 0x8000, n == 1 case of
        # each halfword product; the affected product becomes 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair: low word gets the low
        # 16 bits shifted in, high word gets the upper bits.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # put results into the E[c] pair
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDM_H_83_1D_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision instruction.

        MADDM.H (op = 0x83, op2 = 0x1D):
        E[c] = E[d] + ((A_hi * B_lo) + (A_lo * B_hi), each shifted left by n,
        summed and shifted left by 16). Cross-halfword variant.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDM.H_83_1D'
    # First opcode byte: 0x83; op2 = 0x1D split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum the two cross products, accumulate into E[d], write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): masks are sc^0xffff here but sc^0xffffffff in the
        # 83_1E sibling — confirm intended width.
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # put results into the E[c] pair
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDM_H_83_1C_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision instruction.

        MADDM.H (op = 0x83, op2 = 0x1C):
        E[c] = E[d] + ((A_hi * B_hi) + (A_lo * B_lo), each shifted left by n,
        summed and shifted left by 16). Same-halfword variant.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDM.H_83_1C'
    # First opcode byte: 0x83; op2 = 0x1C split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum hi*hi and lo*lo products, accumulate into E[d], write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): masks are sc^0xffff here but sc^0xffffffff in the
        # 83_1E sibling — confirm intended width.
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # put results into the E[c] pair
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDM_H_83_1F_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision instruction.

        MADDM.H (op = 0x83, op2 = 0x1F):
        E[c] = E[d] + ((A_lo * B_hi) + (A_hi * B_hi), each shifted left by n,
        summed and shifted left by 16).
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDM.H_83_1F'
    # First opcode byte: 0x83; op2 = 0x1F split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum the two products against B_hi, accumulate into E[d], write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): masks are sc^0xffff here but sc^0xffffffff in the
        # 83_1E sibling — confirm intended width.
        result_w1 = (0x7fffffff & sc1) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # put results into the E[c] pair
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDMS_H_83_3E_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision, Saturated instruction.

        MADDMS.H (op = 0x83, op2 = 0x3E):
        E[c] = ssov(E[d] + ((A_hi * B_lo) + (A_lo * B_lo), each shifted left
        by n, summed and shifted left by 16)), saturated per 32-bit word.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDMS.H_83_3E'
    # First opcode byte: 0x83; op2 = 0x3E split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum products, accumulate into E[d], saturate words, write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): masks are sc^0xffff here but sc^0xffffffff in the
        # 83_1E sibling — confirm intended width.
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # compute ssov32 per 32-bit word
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the E[c] pair
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags (pre-saturation value)
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDMS_H_83_3D_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision, Saturated instruction.

        MADDMS.H (op = 0x83, op2 = 0x3D):
        E[c] = ssov(E[d] + ((A_hi * B_lo) + (A_lo * B_hi), each shifted left
        by n, summed and shifted left by 16)), saturated per 32-bit word.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDMS.H_83_3D'
    # First opcode byte: 0x83; op2 = 0x3D split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum cross products, accumulate into E[d], saturate words, write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        # BUGFIX: the shift amount was mistakenly written as 0xffff; every
        # sibling MADDM/MADDMS class shifts the carry part right by 16.
        result_w1 = e_d_1 + (sum1 >> 16)
        # compute ssov32 per 32-bit word
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the E[c] pair
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags (pre-saturation value)
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDMS_H_83_3C_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision, Saturated instruction.

        MADDMS.H (op = 0x83, op2 = 0x3C):
        E[c] = ssov(E[d] + ((A_hi * B_hi) + (A_lo * B_lo), each shifted left
        by n, summed and shifted left by 16)), saturated per 32-bit word.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDMS.H_83_3C'
    # First opcode byte: 0x83; op2 = 0x3C split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum hi*hi and lo*lo products, accumulate into E[d], saturate, write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        # BUGFIX: was 'sum1 >> 0xffff' with a TODO; the carry part must be
        # shifted right by 16, as in every sibling MADDM/MADDMS class.
        result_w1 = e_d_1 + (sum1 >> 16)
        # compute ssov32 per 32-bit word
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the E[c] pair
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags (pre-saturation value)
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDMS_H_83_3F_Inst(Instruction):
    """ Packed Multiply-Add Q Format Multi-precision, Saturated instruction.

        MADDMS.H (op = 0x83, op2 = 0x3F):
        E[c] = ssov(E[d] + ((A_lo * B_hi) + (A_hi * B_hi), each shifted left
        by n, summed and shifted left by 16)), saturated per 32-bit word.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDMS.H_83_3F'
    # First opcode byte: 0x83; op2 = 0x3F split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Sum products against B_hi, accumulate into E[d], saturate, write E[c], set flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): masks are sc^0xffff here but sc^0xffffffff in the
        # 83_1E sibling — confirm intended width.
        result_w1 = (0x7fffffff & sc1) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffff))
        sum1 = result_w1 + result_w0
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate sum1 << 16 across the 64-bit pair.
        result_w0 = e_d_0 + (sum1 << 16)
        result_w1 = e_d_1 + (sum1 >> 16)
        # compute ssov32 per 32-bit word
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the E[c] pair
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags (pre-saturation value)
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDR_H_83_0E_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction.

        MADDR.H (op = 0x83, op2 = 0x0E):
        D[c] packs the rounded upper halfwords of
        (D[d]_hi + (A_hi * B_lo) << n + 0x8000) and
        (D[d]_lo + (A_lo * B_lo) << n + 0x8000).
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDR.H_83_0E'
    # First opcode byte: 0x83; op2 = 0x0E split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Per-halfword multiply-add with rounding; pack the two upper halves into D[c]."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        mul_res0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate each halfword of D[d] (aligned to bits 31:16) and add
        # 0x8000 to round toward the upper halfword.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded upper halves: hw1 in bits 31:16, hw0 in 15:0.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_H_83_0D_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction.

        MADDR.H (op = 0x83, op2 = 0x0D):
        D[c] packs the rounded upper halfwords of
        (D[d]_hi + (A_hi * B_lo) << n + 0x8000) and
        (D[d]_lo + (A_lo * B_hi) << n + 0x8000). Cross-halfword variant.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDR.H_83_0D'
    # First opcode byte: 0x83; op2 = 0x0D split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Per-halfword multiply-add (cross products) with rounding; pack into D[c]."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        mul_res0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff))
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate each halfword of D[d] (aligned to bits 31:16) plus the
        # 0x8000 rounding constant.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded upper halves: hw1 in bits 31:16, hw0 in 15:0.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_H_83_0C_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction.

        MADDR.H (op = 0x83, op2 = 0x0C):
        D[c] packs the rounded upper halfwords of
        (D[d]_hi + (A_hi * B_hi) << n + 0x8000) and
        (D[d]_lo + (A_lo * B_lo) << n + 0x8000). Same-halfword variant.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDR.H_83_0C'
    # First opcode byte: 0x83; op2 = 0x0C split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Per-halfword multiply-add (hi*hi, lo*lo) with rounding; pack into D[c]."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff))
        mul_res0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate each halfword of D[d] (aligned to bits 31:16) plus the
        # 0x8000 rounding constant.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded upper halves: hw1 in bits 31:16, hw0 in 15:0.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_H_43_1E_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction.

        MADDR.H (op = 0x43, op2 = 0x1E):
        D[c] packs the rounded upper halfwords of
        (E[d][63:32] + (A_hi * B_hi) << n + 0x8000) and
        (E[d][31:0] + (A_lo * B_lo) << n + 0x8000), where the halfword
        operands are sign-extended via extract_16s.
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDR.H_43_1E'
    # First opcode byte: 0x43; op2 = 0x1E split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multiply-add against the E[d] pair with rounding; pack into D[c]."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # extract_16s(x, i): sign-extended i-th halfword of x.
        mul_res1 = (0x7fffffff & sc1) | (((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff))
        mul_res0 = (0x7fffffff & sc0) | (((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate into each word of E[d] plus the 0x8000 rounding constant.
        result_hw0 = e_d_0 + mul_res0 + 0x8000
        result_hw1 = e_d_1 + mul_res1 + 0x8000
        # Pack the two rounded upper halves: hw1 in bits 31:16, hw0 in 15:0.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_H_83_0F_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction.

        MADDR.H (op = 0x83, op2 = 0x0F):
        D[c] packs the rounded upper halfwords of
        (D[d]_hi + (A_lo * B_hi) << n + 0x8000) and
        (D[d]_lo + (A_hi * B_hi) << n + 0x8000).
        User Status Flags: V, SV, AV, SAV.
    """
    name = 'RRR1_MADDR.H_83_0F'
    # First opcode byte: 0x83; op2 = 0x0F split into 2-bit + 4-bit fields.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 encoding: op | D[b] | D[a] | op2 | n (shift) | D[c] | D[d].
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Per-halfword multiply-add against B_hi with rounding; pack into D[c]."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Saturation masks for the 0x8000 * 0x8000, n == 1 overflow case.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Note: '&' binds tighter than '|' in Python, so these group the same
        # way as the fully parenthesized forms in the sibling classes.
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate each halfword of D[d] (aligned to bits 31:16) plus the
        # 0x8000 rounding constant.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded upper halves: hw1 in bits 31:16, hw0 in 15:0.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep previous SV/SAV when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_H_83_2E_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x83
    op2 = 0x2E
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.H_83_2E'
    # First opcode byte 0x83 as an 8-character bit string.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x2E, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword multiply-add with rounding and 32-bit saturation; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Sign-extended halfword products: D[a] upper * D[b] lower, and
        # D[a] lower * D[b] lower, each left-shifted by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate into the halves of D[d]; 0x8000 is the rounding constant.
        # Widened to signed int_64 so ssov32 can observe 32-bit overflow.
        result_hw1 = ((d_d & 0xffff0000) + mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the rounded, saturated high halfwords into one 32-bit result.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_H_83_2D_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x83
    op2 = 0x2D
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.H_83_2D'
    # First opcode byte 0x83 as an 8-character bit string.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x2D, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        """Result is committed to D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword multiply-add with rounding and 32-bit saturation; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # BUGFIX: sign-extended halfword products (extract_16s) as in the
        # sibling MADDRS.H encodings 0x2E/0x2C/0x2F; the former unsigned
        # masked halfwords gave wrong products for negative operands.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # BUGFIX: widen the accumulator sums to signed int_64 before
        # saturating, matching the sibling encodings; saturating a 32-bit sum
        # against 32-bit bounds could never detect overflow.
        result_hw1 = ((d_d & 0xffff0000) + mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the rounded, saturated high halfwords; the & 0xffff keeps the
        # 64-bit shift result from leaking into the upper half.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_H_83_2C_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x83
    op2 = 0x2C
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.H_83_2C'
    # First opcode byte 0x83 as an 8-character bit string.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x2C, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword multiply-add with rounding and 32-bit saturation; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Sign-extended halfword products: upper*upper and lower*lower,
        # each left-shifted by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate into the halves of D[d]; 0x8000 is the rounding constant.
        # Widened to signed int_64 so ssov32 can observe 32-bit overflow.
        result_hw1 = ((d_d & 0xffff0000) + mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the rounded, saturated high halfwords into one 32-bit result.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_H_43_3E_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x43
    op2 = 0x3E
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.H_43_3E'
    # First opcode byte 0x43 as an 8-character bit string.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x3E, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword multiply-add into the E[d] register pair, rounded and saturated."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Sign-extended halfword products: upper*upper and lower*lower,
        # each left-shifted by n.
        mul_res1 = (0x7fffffff & sc1) | (((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff))
        mul_res0 = (0x7fffffff & sc0) | (((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Accumulate into the two words of E[d]; 0x8000 is the rounding
        # constant. NOTE(review): these casts lack signed=True, unlike the
        # sibling MADDRS.H 0x83 encodings — confirm whether zero-extension
        # here is intended.
        result_hw0 = (e_d_0 + mul_res0 + 0x8000).cast_to(Type.int_64)
        result_hw1 = (e_d_1 + mul_res1 + 0x8000).cast_to(Type.int_64)
        # compute ssov
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        # Pack the rounded, saturated high halfwords into one 32-bit result.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_H_83_2F_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x83
    op2 = 0x2F
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.H_83_2F'
    # First opcode byte 0x83 as an 8-character bit string.
    op = "{0}{1}".format(bin(8)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x2F, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword multiply-add with rounding and 32-bit saturation; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Sign-extended halfword products: D[a] lower * D[b] upper, and
        # D[a] upper * D[b] upper, each left-shifted by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Accumulate into the halves of D[d]; 0x8000 is the rounding constant.
        # Widened to signed int_64 so ssov32 can observe 32-bit overflow.
        result_hw1 = ((d_d & 0xffff0000) + mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        # compute ssov
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        # Pack the rounded, saturated high halfwords into one 32-bit result.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_Q_43_07_Inst(Instruction):
    """ Multiply-Add Q Format with Rounding instruction:
    op = 0x43
    op2 = 0x07
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDR.Q_43_07'
    # First opcode byte 0x43 as an 8-character bit string.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x07, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(7)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Single multiply-add of the lower halfwords, rounded; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both lower halfwords equal 0x8000 and n == 1;
        # the product is then forced to 0x7fffffff.
        sc = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): the product is formed from unsigned masked halfwords;
        # the saturated counterpart MADDRS.Q 0x27 does the same, but
        # MADDRS.Q 0x26 uses sign-extended extract_16s — confirm which is
        # intended for Q-format operands.
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffffffff))
        # Accumulate into D[d], add the 0x8000 rounding constant, and keep
        # only the upper halfword of the sum.
        result = (d_d + mul_res + 0x8000) & 0xffff0000
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDR_Q_43_06_Inst(Instruction):
    """ Multiply-Add Q Format with Rounding instruction:
    op = 0x43
    op2 = 0x06
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDR.Q_43_06'
    # First opcode byte 0x43 as an 8-character bit string.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x06, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(6)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Single multiply-add of the upper halfwords, rounded; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both upper halfwords equal 0x8000 and n == 1;
        # the product is then forced to 0x7fffffff.
        sc = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword product here, but the
        # saturated counterpart MADDRS.Q 0x26 uses sign-extended
        # extract_16s for the same operand pair — confirm which is intended.
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffffffff))
        # Accumulate into D[d], add the 0x8000 rounding constant, and keep
        # only the upper halfword of the sum.
        result = (d_d + mul_res + 0x8000) & 0xffff0000
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_Q_43_27_Inst(Instruction):
    """ Multiply-Add Q Format with Rounding instruction:
    op = 0x43
    op2 = 0x27
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.Q_43_27'
    # First opcode byte 0x43 as an 8-character bit string.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x27, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(7)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Multiply-add of the lower halfwords, rounded and saturated; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both lower halfwords equal 0x8000 and n == 1;
        # the product is then forced to 0x7fffffff.
        sc = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword product; the sibling
        # MADDRS.Q 0x26 uses sign-extended extract_16s — confirm which is
        # intended for Q-format operands.
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffffffff))
        # Accumulate with the 0x8000 rounding constant, widened to int_64
        # before saturating against the 32-bit bounds.
        sum_tmp = (d_d + mul_res + 0x8000).cast_to(Type.int_64)
        sum_tmp_ssov = ssov32(sum_tmp, self.max_pos, self.max_neg)
        # Keep only the upper halfword of the saturated sum.
        result = sum_tmp_ssov & 0xffff0000
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDRS_Q_43_26_Inst(Instruction):
    """ Multiply-Add Q Format with Rounding, Saturated instruction:
    op = 0x43
    op2 = 0x26
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDRS.Q_43_26'
    # First opcode byte 0x43 as an 8-character bit string.
    op = "{0}{1}".format(bin(4)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x26, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(6)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # INT32 saturation upper bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # INT32 saturation lower bound, widened to int_64 for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        # Result is committed to D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Multiply-add of the upper halfwords, rounded and saturated; updates PSW."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask when both upper halfwords equal 0x8000 and n == 1;
        # the product is then forced to 0x7fffffff.
        sc = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Sign-extended upper-halfword product, left-shifted by n.
        mul_res = (0x7fffffff & sc) | (((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc^0xffffffff))
        # Accumulate with the 0x8000 rounding constant, widened to int_64
        # before saturating against the 32-bit bounds.
        sum_tmp = (d_d + mul_res + 0x8000).cast_to(Type.int_64)
        sum_tmp_ssov = ssov32(sum_tmp, self.max_pos, self.max_neg)
        # Keep only the upper halfword of the saturated sum.
        result = sum_tmp_ssov & 0xffff0000
        # set flags
        c = 0
        v = overflow(result).cast_to(Type.int_32)
        av = advanced_overflow(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MADDSU_H_C3_1A_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x1A
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSU.H_C3_1A'
    # First opcode byte 0xC3 as an 8-character bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x1A, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Add one product to E[d] upper word, subtract the other from the lower word.

        Writes D[c]/D[c+1] directly and returns None, so there is no
        commit_result on this class.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword products; the saturated
        # sibling MADDSUS.H 0x3A uses sign-extended extract_16s — confirm
        # which is intended for Q-format operands.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Lower word subtracts its product; upper word adds its product.
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are the OR of the per-word overflow conditions.
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSU_H_C3_19_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x19
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSU.H_C3_19'
    # First opcode byte 0xC3 as an 8-character bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x19, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Add one product to E[d] upper word, subtract the other from the lower word.

        Writes D[c]/D[c+1] directly and returns None, so there is no
        commit_result on this class.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword products; the saturated
        # sibling MADDSUS.H 0x3A uses sign-extended extract_16s — confirm
        # which is intended for Q-format operands.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Lower word subtracts its product; upper word adds its product.
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are the OR of the per-word overflow conditions.
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSU_H_C3_18_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x18
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSU.H_C3_18'
    # First opcode byte 0xC3 as an 8-character bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x18, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Add one product to E[d] upper word, subtract the other from the lower word.

        Writes D[c]/D[c+1] directly and returns None, so there is no
        commit_result on this class.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword products; the saturated
        # sibling MADDSUS.H 0x3A uses sign-extended extract_16s — confirm
        # which is intended for Q-format operands.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Lower word subtracts its product; upper word adds its product.
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are the OR of the per-word overflow conditions.
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSU_H_C3_1B_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x1B
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSU.H_C3_1B'
    # First opcode byte 0xC3 as an 8-character bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # op2 = 0x1B, split into the 2-bit and 4-bit encoding fields.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the a, b, c, d register fields and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Add one product to E[d] upper word, subtract the other from the lower word.

        Writes D[c]/D[c+1] directly and returns None, so there is no
        commit_result on this class.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc*: all-ones mask when both halfword operands equal 0x8000 and
        # n == 1; the product is then forced to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): unsigned masked halfword products; the saturated
        # sibling MADDSUS.H 0x3A uses sign-extended extract_16s — confirm
        # which is intended for Q-format operands.
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][62:32]
        # Lower word subtracts its product; upper word adds its product.
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 + mul_res1
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are the OR of the per-word overflow conditions.
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit while no overflow, force 1 on overflow.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUS_H_C3_3A_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x3A
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUS.H_C3_3A'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3A, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Signed halfword products (D[a][31:16]*D[b][15:0] and
        D[a][15:0]*D[b][15:0], Q format, shifted left by n); add the high
        product to E[d][63:32], subtract the low one from E[d][31:0],
        saturate each word into E[c] and update V/SV/AV/SAV.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0  # lower word: subtract product
        result_w1 = e_d_1 + mul_res1  # upper word: add product
        # compute ssov (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags: V/AV reflect overflow in either saturated word
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: keep previous bit unless new overflow occurred
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUS_H_C3_39_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x39
    User Status Flags: V, SV, AV, SAV
    """
    # NOTE(review): class name / op2 (0x39) suggest MADDSUS.H but 'name'
    # says 'RRR1_MADDS.H' — confirm against the ISA opcode tables.
    name = 'RRR1_MADDS.H_C3_39'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x39, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword products (D[a][31:16]*D[b][15:0] and
        D[a][15:0]*D[b][31:16], Q format, shifted left by n); add the high
        product to E[d][63:32], subtract the low one from E[d][31:0],
        saturate each word into E[c] and update V/SV/AV/SAV.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # NOTE(review): this variant multiplies unsigned-masked halfwords,
        # while the op2=0x3A sibling uses extract_16s (signed) — verify.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0  # lower word: subtract product
        result_w1 = e_d_1 + mul_res1  # upper word: add product
        # compute ssov32 (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags: V/AV reflect overflow in either saturated word
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: keep previous bit unless new overflow occurred
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUS_H_C3_38_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x38
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUS.H_C3_38'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x38, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Signed halfword products (upper*upper and lower*lower, Q
        format, shifted left by n); add the high product to E[d][63:32],
        subtract the low one from E[d][31:0], saturate each word into
        E[c] and update V/SV/AV/SAV.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0  # lower word: subtract product
        result_w1 = e_d_1 + mul_res1  # upper word: add product
        # compute ssov (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags: V/AV reflect overflow in either saturated word
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: keep previous bit unless new overflow occurred
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUS_H_C3_3B_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format instruction:
    op = 0xC3
    op2 = 0x3B
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUS.H_C3_3B'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3B, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Halfword products (D[a][15:0]*D[b][31:16] and
        D[a][31:16]*D[b][31:16], Q format, shifted left by n); add the
        high product to E[d][63:32], subtract the low one from E[d][31:0],
        saturate each word into E[c] and update V/SV/AV/SAV.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0  # lower word: subtract product
        result_w1 = e_d_1 + mul_res1  # upper word: add product
        # compute ssov (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags: V/AV reflect overflow in either saturated word
        c = 0
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: keep previous bit unless new overflow occurred
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUM_H_C3_1E_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision instruction:
    op = 0xC3
    op2 = 0x1E
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUM.H_C3_1E'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1E, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision: form the difference of the two halfword
        products (D[a][31:16]*D[b][15:0] minus D[a][15:0]*D[b][15:0]),
        add it — shifted into a 64-bit position — to E[d] and write the
        sum into E[c]; flags come from the combined 64-bit result.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUM_H_C3_1D_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision instruction:
    op = 0xC3
    op2 = 0x1D
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUM.H_C3_1D'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1D, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision: form the difference of the two halfword
        products (D[a][31:16]*D[b][15:0] minus D[a][15:0]*D[b][31:16]),
        add it — shifted into a 64-bit position — to E[d] and write the
        sum into E[c]; flags come from the combined 64-bit result.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # BUGFIX: the inverse mask was sc^0xffff, which truncated the
        # 32-bit product to its low 16 bits in the non-saturating case.
        # Every sibling variant (op2 0x1E/0x1F/0x3C-0x3F) uses the full
        # 32-bit inverse mask sc^0xffffffff.
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUM_H_C3_1C_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision instruction:
    op = 0xC3
    op2 = 0x1C
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUM.H_C3_1C'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1C, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision: form the difference of the two halfword
        products (upper*upper minus lower*lower), add it — shifted into
        a 64-bit position — to E[d] and write the sum into E[c]; flags
        come from the combined 64-bit result.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # BUGFIX: the inverse mask on the high product was sc1^0xffffff
        # (six f's), which truncated the 32-bit product to 24 bits in the
        # non-saturating case; the low-product line and every sibling
        # variant use the full 32-bit inverse mask sc^0xffffffff.
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUM_H_C3_1F_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision instruction:
    op = 0xC3
    op2 = 0x1F
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUM.H_C3_1F'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1F, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision: form the difference of the two halfword
        products (D[a][15:0]*D[b][31:16] minus D[a][31:16]*D[b][31:16]),
        add it — shifted into a 64-bit position — to E[d] and write the
        sum into E[c]; flags come from the combined 64-bit result.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUMS_H_C3_3E_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision, Saturated instruction:
    op = 0xC3
    op2 = 0x3E
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUMS.H_C3_3E'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3E, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision, saturated: difference of the two halfword
        products (D[a][31:16]*D[b][15:0] minus D[a][15:0]*D[b][15:0]),
        added — shifted into a 64-bit position — to E[d]; each result
        word is saturated into E[c] and flags come from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # compute ssov32 (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1_ssov
        result <<= 32
        result |= result_w0_ssov
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUMS_H_C3_3D_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision, Saturated instruction:
    op = 0xC3
    op2 = 0x3D
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUMS.H_C3_3D'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3D, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision, saturated: difference of the two halfword
        products (D[a][31:16]*D[b][15:0] minus D[a][15:0]*D[b][31:16]),
        added — shifted into a 64-bit position — to E[d]; each result
        word is saturated into E[c] and flags come from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # compute ssov32 (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1_ssov
        result <<= 32
        result |= result_w0_ssov
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUMS_H_C3_3C_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision, Saturated instruction:
    op = 0xC3
    op2 = 0x3C
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUMS.H_C3_3C'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3C, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision, saturated: difference of the two halfword
        products (upper*upper minus lower*lower), added — shifted into a
        64-bit position — to E[d]; each result word is saturated into
        E[c] and flags come from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff))
        sum1 = result_w1 - result_w0  # difference of the two products (name kept from original)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sum1 << 16)  # low word of E[d] + (diff << 16)
        result_w1 = e_d_1 + (sum1 >> 16)  # high word of E[d] + upper part
        # compute ssov32 (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1_ssov
        result <<= 32
        result |= result_w0_ssov
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUMS_H_C3_3F_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format Multi-precision, Saturated instruction:
    op = 0xC3
    op2 = 0x3F
    User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUMS.H_C3_3F'
    # Primary opcode byte 0xC3, as a bit string.
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3F, split over a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode register indices a, b, c, d and the 2-bit shift n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the destination register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Current Program Status Word."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Source register D[b]."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Source register D[a]."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Multi-precision, saturated: difference of the two halfword
        products (D[a][15:0]*D[b][31:16] minus D[a][31:16]*D[b][31:16]),
        added — shifted into a 64-bit position — to E[d]; each result
        word is saturated into E[c] and flags come from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0: all-ones when the 0x8000 * 0x8000 with n == 1 case
        # would overflow the Q product — then saturate to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        result_w1 = (0x7fffffff & sc1) | ((((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff))
        result_w0 = (0x7fffffff & sc0) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff))
        sub1 = result_w1 - result_w0  # difference of the two products
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 + (sub1 << 16)  # low word of E[d] + (sub1 << 16)
        result_w1 = e_d_1 + (sub1 >> 16)  # high word of E[d] + upper part
        # compute ssov32 (per-word signed saturation)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1_ssov
        result <<= 32
        result |= result_w0_ssov
        # set flags (64-bit overflow semantics); SV/SAV are sticky
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MADDSUR_H_C3_0E_Inst(Instruction):
""" Packed Multiply-Add/Subtract Q Format with Rounding instruction:
op = 0xC3
op2 = 0x0E
User Status Flags: V, SV, AV, SAV
"""
name = 'RRR1_MADDSUR.H_C3_0E'
op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
op2_1 = "{0}".format(bin(0)[2:].zfill(2))
op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
def parse(self, bitstrm):
data = Instruction.parse(self, bitstrm)
data = {"a": int(data['a'], 2),
"b": int(data['b'], 2),
"c": int(data['c'], 2),
"d": int(data['d'], 2),
"n": int(data['n'], 2)}
log_this(self.name, data, hex(self.addr))
return data
def get_dst_reg(self):
return "d{0}".format(self.data['c'])
def get_psw(self):
return self.get("psw", Type.int_32)
def get_n(self):
return self.constant(self.data['n'], Type.int_2)
def get_d_b(self):
return self.get("d{0}".format(self.data['b']), Type.int_32)
def get_d_a(self):
return self.get("d{0}".format(self.data['a']), Type.int_32)
def fetch_operands(self):
return self.get_d_a(), self.get_d_b(), self.get_n()
def compute_result(self, *args):
d_a = args[0]
d_b = args[1]
n = args[2]
sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
result_hw0 = (d_d << 16) - mul_res0 + 0x8000
result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
# set flags
c = 0
v = overflow(result)
av = advanced_overflow(result)
psw = self.get_psw()
cond_sv = (v == 0)
cond_sav = (av == 0)
sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
psw = set_usb(psw, c, v, sv, av, sav)
self.put(psw, "psw")
return result
    def commit_result(self, res):
        # Write the packed 32-bit result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSUR_H_C3_0D_Inst(Instruction):
    """ Packed Multiply-Add Q Format with Rounding instruction:
        op = 0xC3
        op2 = 0x0D
        User Status Flags: V, SV, AV, SAV
    """
    # NOTE(review): the class is named MADDSUR but `name` reads 'RRR1_MADDR.H';
    # one of the two looks inconsistent -- confirm against the TriCore ISA manual.
    name = 'RRR1_MADDR.H_C3_0D'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSUR_H_C3_0C_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding instruction:
        op = 0xC3
        op2 = 0x0C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUR.H_C3_0C'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSUR_H_C3_0F_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding instruction:
        op = 0xC3
        op2 = 0x0F
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSUR.H_C3_0F'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSURS_H_C3_2E_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding, Saturated instruction:
        op = 0xC3
        op2 = 0x2E
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSURS.H_C3_2E'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        # compute ssov32 (saturate each half-word sum to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_hw1_ssov = ssov32(result_hw1, max_pos, max_neg)
        result_hw0_ssov = ssov32(result_hw0, max_pos, max_neg)
        result = (result_hw1_ssov & 0xffff0000) | (result_hw0_ssov >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSURS_H_C3_2D_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding, Saturated instruction:
        op = 0xC3
        op2 = 0x2D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSURS.H_C3_2D'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        # compute ssov32 (saturate each half-word sum to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_hw1_ssov = ssov32(result_hw1, max_pos, max_neg)
        result_hw0_ssov = ssov32(result_hw0, max_pos, max_neg)
        result = (result_hw1_ssov & 0xffff0000) | (result_hw0_ssov >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSURS_H_C3_2C_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding, Saturated instruction:
        op = 0xC3
        op2 = 0x2C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSURS.H_C3_2C'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        # compute ssov32 (saturate each half-word sum to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_hw1_ssov = ssov32(result_hw1, max_pos, max_neg)
        result_hw0_ssov = ssov32(result_hw0, max_pos, max_neg)
        result = (result_hw1_ssov & 0xffff0000) | (result_hw0_ssov >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MADDSURS_H_C3_2F_Inst(Instruction):
    """ Packed Multiply-Add/Subtract Q Format with Rounding, Saturated instruction:
        op = 0xC3
        op2 = 0x2F
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MADDSURS.H_C3_2F'
    op = "{0}{1}".format(bin(0xc)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        d_d = self.get("d{0}".format(self.data['d']), Type.int_32)
        # Upper half-word adds, lower half-word subtracts; +0x8000 rounds.
        result_hw1 = (d_d & 0xffff0000) + mul_res1 + 0x8000
        result_hw0 = (d_d << 16) - mul_res0 + 0x8000
        # compute ssov32 (saturate each half-word sum to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_hw0_ssov = ssov32(result_hw0, max_pos, max_neg)
        result_hw1_ssov = ssov32(result_hw1, max_pos, max_neg)
        result = (result_hw1_ssov & 0xffff0000) | (result_hw0_ssov >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        # Write the packed result to destination register D[c].
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_H_A3_1A_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format instruction:
        op = 0xA3
        op2 = 0x1A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.H_A3_1A'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # FIX: extend the 1-bit saturation condition to a full 32-bit mask
        # (all-ones / all-zeros), as every sibling RRR1 packed-multiply class
        # in this file does; without it, (0x7fffffff & sc) evaluates to 1
        # instead of the saturated value 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # FIX: complement across all 32 bits (0xffffffff, not 0xffff);
        # the old mask truncated the non-saturated product to 16 bits.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # put results into the destination register pair E[c]
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_H_A3_19_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format instruction:
        op = 0xA3
        op2 = 0x19
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.H_A3_19'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # FIX: extend the 1-bit saturation condition to a full 32-bit mask,
        # matching the correct form used by RRR1_MSUBS_H_A3_39 and the other
        # sibling classes; without it, (0x7fffffff & sc) evaluates to 1.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # FIX: complement across all 32 bits (0xffffffff, not 0xffff);
        # the old mask truncated the non-saturated product to 16 bits.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # put results into the destination register pair E[c]
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_H_A3_18_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format instruction:
        op = 0xA3
        op2 = 0x18
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.H_A3_18'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # FIX: extend the 1-bit saturation condition to a full 32-bit mask,
        # matching the correct form used by RRR1_MSUBS_H_A3_38 and the other
        # sibling classes; without it, (0x7fffffff & sc) evaluates to 1.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # FIX: complement across all 32 bits (0xffffffff, not 0xffff);
        # the old mask truncated the non-saturated product to 16 bits.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # put results into the destination register pair E[c]
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_H_A3_1B_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format instruction:
        op = 0xA3
        op2 = 0x1B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.H_A3_1B'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # FIX: extend the 1-bit saturation condition to a full 32-bit mask,
        # matching the correct form used by the sibling RRR1 packed-multiply
        # classes; without it, (0x7fffffff & sc) evaluates to 1.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # FIX: complement across all 32 bits (0xffffffff, not 0xffff);
        # the old mask truncated the non-saturated product to 16 bits.
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # put results into the destination register pair E[c]
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_H_A3_3A_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format, Saturated instruction:
        op = 0xA3
        op2 = 0x3A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.H_A3_3A'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # FIX: extend the 1-bit saturation condition to a full 32-bit mask,
        # matching the correct form used by the sibling RRR1_MSUBS_H_A3_39 /
        # _38 classes; without it, (0x7fffffff & sc) evaluates to 1.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # FIX: complement across all 32 bits (0xffffffff, not 0xffff);
        # the old mask truncated the non-saturated product to 16 bits.
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # compute ssov32 (saturate each word difference to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0 = ssov32(e_d_0 - mul_res0, max_pos, max_neg)
        result_w1 = ssov32(e_d_1 - mul_res1, max_pos, max_neg)
        # put results into the destination register pair E[c]
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_H_A3_39_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format, Saturated instruction:
        op = 0xA3
        op2 = 0x39
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.H_A3_39'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b & 0xffff)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # compute ssov32 (saturate each word difference to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the destination register pair E[c]
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_H_A3_38_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format, Saturated instruction:
        op = 0xA3
        op2 = 0x38
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.H_A3_38'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode register indices a, b, c, d and the shift amount n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register name, e.g. "d3".
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]  # D[a]
        d_b = args[1]  # D[b]
        n = args[2]    # left-shift amount (0 or 1)
        # Saturation guard: 0x8000 * 0x8000 << 1 is not representable; the
        # extended mask selects 0x7fffffff for the affected product instead.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # compute ssov32 (saturate each word difference to the signed 32-bit range)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results into the destination register pair E[c]
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky overflow flags: preserve old bit if no overflow, else set.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_H_A3_3B_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format, Saturated instruction:
        op = 0xA3
        op2 = 0x3B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.H_A3_3B'
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """Packed halfword multiply-subtract (op2 0x3B halfword selection),
        each word saturated to 32 bits; results written to E[c].

        NOTE(review): unlike op2 0x38, word1 pairs D[a][15:0] with
        D[b][31:16] and word0 pairs D[a][31:16] with D[b][31:16] — this
        looks like a *UL-style selection; confirm against the TriCore
        architecture manual.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc1/sc0 flag the 0x8000 * 0x8000 with n == 1 special case; the
        # product is then clamped to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | (((d_a & 0xffff) * (d_b >> 16)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | (((d_a >> 16) * (d_b >> 16)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - mul_res0
        result_w1 = e_d_1 - mul_res1
        # compute ssov32: saturate each word independently
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        ov_w0 = overflow(result_w0_ssov)
        ov_w1 = overflow(result_w1_ssov)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0_ssov)
        aov_w1 = advanced_overflow(result_w1_ssov)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_Q_63_02_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x02
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_02'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(2)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = D[d] - upper 32 bits of (D[a] * D[b]) << n.

        Returns the 32-bit result (committed via commit_result) and updates
        PSW V/SV/AV/SAV as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # Take only the high word of the shifted 32x32 product.
        result = d_d - (((d_a * d_b) << n.value) >> 32)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_Q_63_1B_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x1B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_1B'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = E[d] - (D[a] * D[b]) << n; results committed here.

        NOTE(review): the subtraction touches only the low word and any
        borrow into the high word is not propagated — compare the 64-bit
        handling in RRR1_MSUBS_Q_63_3B_Inst; confirm intended.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        result_tmp = (d_a * d_b) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - result_tmp
        result_w1 = e_d_1  # high word passes through unchanged
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        # NOTE(review): result_w1 is a 32-bit value here; `<<= 32` relies on
        # the overflow helpers widening their operand — verify.
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_Q_63_01_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x01
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_01'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(1)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = D[d] - ((D[a] * D[b][15:0]) << n) >> 16.

        Uses the lower halfword of D[b]; result committed via commit_result,
        PSW V/SV/AV/SAV updated as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        result = d_d - (((d_a * (d_b & 0xffff)) << n.value) >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_Q_63_19_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x19
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_19'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = E[d] - (D[a] * D[b][15:0]) << n; results committed here.

        NOTE(review): only the low word is updated; a borrow into the high
        word is not propagated (compare RRR1_MSUBS_Q_63_39_Inst's 64-bit
        path) — confirm intended.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        result_tmp = (d_a * (d_b & 0xffff)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - result_tmp
        result_w1 = e_d_1  # high word passes through unchanged
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_Q_63_00_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x00
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_00'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = D[d] - ((D[a] * D[b][31:16]) << n) >> 16.

        Uses the upper halfword of D[b]; result committed via commit_result,
        PSW V/SV/AV/SAV updated as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        result = d_d - (((d_a * (d_b >> 16)) << n.value) >> 16)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_Q_63_18_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x18
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_18'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = E[d] - (D[a] * D[b][31:16]) << n; results committed here.

        PSW V/SV/AV/SAV are updated from the combined 64-bit result.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # BUG FIX: select the upper halfword of D[b] by shifting right 16
        # bits.  The original code shifted by 0xffff (65535), which always
        # produced 0 and zeroed the product; every sibling op2 variant
        # (e.g. 0x00, 0x20) uses `d_b >> 16` for the same selection.
        result_tmp = (d_a * (d_b >> 16)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - result_tmp
        result_w1 = e_d_1  # high word passes through unchanged
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_Q_63_05_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x05
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_05'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(5)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = D[d] - (D[a][15:0] * D[b][15:0]) << n (lower halfwords).

        The 0x8000 * 0x8000 with n == 1 special case clamps the product to
        0x7fffffff.  Result committed via commit_result; PSW updated here.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask flagging the unrepresentable doubled product.
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        result = d_d - mul_res
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_Q_63_1D_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x1D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_1D'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = E[d] - ((D[a][15:0] * D[b][15:0]) << n) << 16.

        Lower-halfword product with the 0x8000*0x8000/n==1 clamp, shifted
        to Q-format word alignment before subtraction.
        NOTE(review): only the low word is updated; a borrow into the high
        word is not propagated — confirm intended.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: all-ones mask flagging the unrepresentable doubled product.
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - (mul_res << 16)
        result_w1 = e_d_1  # high word passes through unchanged
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUB_Q_63_04_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x04
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_04'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(4)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = D[d] - (D[a][31:16] * D[b][31:16]) << n (upper halfwords).

        The 0x8000 * 0x8000 with n == 1 special case clamps the product to
        0x7fffffff.  Result committed via commit_result; PSW updated here.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: all-ones mask flagging the unrepresentable doubled product.
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        result = d_d - mul_res
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUB_Q_63_1C_Inst(Instruction):
    """ Multiply-Subtract Q Format instruction:
        op = 0x63
        op2 = 0x1C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUB.Q_63_1C'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = E[d] - ((D[a][31:16] * D[b][31:16]) << n) << 16.

        Upper-halfword product with the 0x8000*0x8000/n==1 clamp, shifted
        to word alignment before subtraction.
        NOTE(review): only the low word is updated; a borrow into the high
        word is not propagated — confirm intended.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: all-ones mask flagging the unrepresentable doubled product.
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        result_w0 = e_d_0 - (mul_res << 16)
        result_w1 = e_d_1  # high word passes through unchanged
        # put results
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_Q_63_22_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x22
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_22'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(2)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = ssov(D[d] - upper word of (D[a] * D[b]) << n).

        Saturated variant of op2 0x02; result committed via commit_result,
        PSW V/SV/AV/SAV updated as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        result1 = d_d - (((d_a * d_b) << n.value) >> 32)
        # compute ssov32: saturate to [INT32_MIN, INT32_MAX]
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUBS_Q_63_3B_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x3B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_3B'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = ssov per word of (E[d] - (D[a] * D[b]) << n).

        The subtraction is performed at full 64-bit width (so a borrow
        propagates into the high word), then each word is saturated to
        32 bits.  Results committed here; PSW updated from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        result_tmp = (d_a * d_b) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        # Assemble E[d] as a single 64-bit value for an exact subtraction.
        d_d_64_bit = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result_64_bit = d_d_64_bit - result_tmp.cast_to(Type.int_64)
        result_w0 = (result_64_bit & 0xffffffff).cast_to(Type.int_32)
        result_w1 = (result_64_bit >> 32).cast_to(Type.int_32)
        # Saturate each word independently to [INT32_MIN, INT32_MAX].
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result_64_bit).cast_to(Type.int_32)
        av = advanced_overflow_64(result_64_bit).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_Q_63_21_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x21
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_21'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(1)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = ssov(D[d] - ((D[a] * D[b][15:0]) << n) >> 16).

        Saturated variant of op2 0x01 (lower halfword of D[b]); result
        committed via commit_result, PSW updated as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        result1 = d_d - (((d_a * (d_b & 0xffff)) << n.value) >> 16)
        # compute ssov32: saturate to [INT32_MIN, INT32_MAX]
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUBS_Q_63_39_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x39
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_39'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()

    def compute_result(self, *args):
        """E[c] = ssov per word of (E[d] - (D[a] * D[b][15:0]) << n).

        Lower halfword of D[b]; the subtraction is done at 64-bit width so
        the borrow propagates, then each word is saturated.  Results are
        committed here; PSW updated from the 64-bit value.
        """
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        result_tmp = (d_a * (d_b & 0xffff)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32)  # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32)  # E[d][62:32]
        # Assemble E[d] as a single 64-bit value for an exact subtraction.
        d_d_64_bit = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result_64_bit = d_d_64_bit - result_tmp.cast_to(Type.int_64)
        result_w0 = (result_64_bit & 0xffffffff).cast_to(Type.int_32)
        result_w1 = (result_64_bit >> 32).cast_to(Type.int_32)
        # Saturate each word independently to [INT32_MIN, INT32_MAX].
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0  # carry is always cleared
        v = overflow_64(result_64_bit).cast_to(Type.int_32)
        av = advanced_overflow_64(result_64_bit).cast_to(Type.int_32)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_Q_63_20_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x20
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_20'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4

    def parse(self, bitstrm):
        """Decode register indices a/b/c/d and the 2-bit shift amount n."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data

    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])

    def get_psw(self):
        return self.get("psw", Type.int_32)

    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)

    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)

    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)

    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)

    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()

    def compute_result(self, *args):
        """D[c] = ssov(D[d] - ((D[a] * D[b][31:16]) << n) >> 16).

        Saturated variant of op2 0x00 (upper halfword of D[b]); result
        committed via commit_result, PSW updated as a side effect.
        """
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        result1 = d_d - (((d_a * (d_b >> 16)) << n.value) >> 16)
        # compute ssov32: saturate to [INT32_MIN, INT32_MAX]
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        c = 0  # carry is always cleared
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # SV/SAV are sticky: keep the old bit unless a new overflow occurred.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result

    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUBS_Q_63_38_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x38
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_38'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Product term: (D[a] * D[b][31:16]) << n; subtracted from the low word of E[d].
        result_tmp = (d_a * (d_b >> 16)) << n.value
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - result_tmp
        result_w1 = e_d_1  # upper word passes through (before saturation)
        # compute ssov32
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        # NOTE(review): result_w1 is an int_32 expression here; shifting it left
        # by 32 assumes the expression wrapper widens implicitly — confirm,
        # otherwise the high word is lost before the flag computation.
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_Q_63_25_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x25
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_25'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(5)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc: special-case mask for 0x8000 * 0x8000 with n==1 (Q-format product
        # would overflow); in that case the product is clamped to 0x7fffffff.
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        result1 = d_d - mul_res
        # compute ssov32
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUBS_Q_63_3D_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x3D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_3D'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc: special-case mask for 0x8000 * 0x8000 with n==1 — product clamped to 0x7fffffff.
        sc = extend_to_16_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a & 0xffff) * (d_b & 0xffff)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # 64-bit accumulator E[d] minus the product aligned to bit 16.
        d_d_64_bit = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result_64_bit = d_d_64_bit - (mul_res.cast_to(Type.int_64) << 16)
        result_w0 = (result_64_bit & 0xffffffff).cast_to(Type.int_32)
        result_w1 = (result_64_bit >> 32).cast_to(Type.int_32)
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        v = overflow_64(result_64_bit).cast_to(Type.int_32)
        av = advanced_overflow_64(result_64_bit).cast_to(Type.int_32)
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBS_Q_63_24_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x24
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_24'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(4)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # Upper-halfword variant: operands are D[a][31:16] and D[b][31:16].
        # sc: special-case mask for 0x8000 * 0x8000 with n==1 — product clamped to 0x7fffffff.
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        result1 = d_d - mul_res
        # compute ssov32
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result = ssov32(result1, max_pos, max_neg)
        # set flags
        c = 0
        v = overflow(result)
        av = advanced_overflow(result)
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
        return result
    def commit_result(self, res):
        self.put(res, self.get_dst_reg())
class RRR1_MSUBS_Q_63_3C_Inst(Instruction):
    """ Multiply-Subtract Q Format, Saturated instruction:
        op = 0x63
        op2 = 0x3C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBS.Q_63_3C'
    op = "{0}{1}".format(bin(6)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # Upper-halfword variant; sc clamps 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc = extend_to_16_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res = (0x7fffffff & sc) | ((((d_a >> 16) * (d_b >> 16)) << n.value) & (sc^0xffff))
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        result_w0 = e_d_0 - (mul_res << 16)
        result_w1 = e_d_1  # upper word passes through (before saturation)
        # compute ssov32
        max_pos = self.constant(INT32_MAX_POS, Type.int_32)
        max_neg = self.constant(INT32_MAX_NEG, Type.int_32)
        result_w0_ssov = ssov32(result_w0, max_pos, max_neg)
        result_w1_ssov = ssov32(result_w1, max_pos, max_neg)
        # put results
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # prepare 64-bit object for setting flags
        # NOTE(review): result_w1 is an int_32 expression; shifting it left by 32
        # assumes implicit widening by the expression wrapper — confirm, otherwise
        # the high word is lost before the flag computation.
        result = result_w1
        result <<= 32
        result |= result_w0
        # set flags
        c = 0
        v = overflow_64(result).cast_to(Type.int_32)
        av = advanced_overflow_64(result).cast_to(Type.int_32)
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBAD_H_E3_1A_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format instruction:
        op = 0xE3
        op2 = 0x1A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBAD.H_E3_1A'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # mul_res1 = D[a][31:16] * D[b][15:0];  mul_res0 = D[a][15:0] * D[b][15:0]
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts its product, lower word adds its product.
        result_w1 = e_d_1 - mul_res1
        result_w0 = e_d_0 + mul_res0
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBAD_H_E3_19_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format instruction:
        op = 0xE3
        op2 = 0x19
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBAD.H_E3_19'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Cross-halfword products: mul_res1 = a[hi]*b[lo], mul_res0 = a[lo]*b[hi].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts its product, lower word adds its product.
        result_w1 = e_d_1 - mul_res1
        result_w0 = e_d_0 + mul_res0
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBAD_H_E3_18_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format instruction:
        op = 0xE3
        op2 = 0x18
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBAD.H_E3_18'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Same-halfword products: mul_res1 = a[hi]*b[hi], mul_res0 = a[lo]*b[lo].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts its product, lower word adds its product.
        result_w1 = e_d_1 - mul_res1
        result_w0 = e_d_0 + mul_res0
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBAD_H_E3_1B_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format instruction:
        op = 0xE3
        op2 = 0x1B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBAD.H_E3_1B'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # mul_res1 = a[lo]*b[hi], mul_res0 = a[hi]*b[hi].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts its product, lower word adds its product.
        result_w1 = e_d_1 - mul_res1
        result_w0 = e_d_0 + mul_res0
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADS_H_E3_3A_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format, Saturated instruction:
        op = 0xE3
        op2 = 0x3A
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADS.H_E3_3A'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xa)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation bounds for ssov32 (32-bit here; siblings E3_39/E3_38 use 64-bit).
        return self.constant(INT32_MAX_POS, Type.int_32)
    @property
    def max_neg(self):
        return self.constant(INT32_MAX_NEG, Type.int_32)
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts, lower word adds; each result saturated to 32 bits.
        # NOTE(review): this variant saturates the 32-bit sums directly while
        # E3_39/E3_38 widen to int_64 first — confirm the difference is intended.
        result_w1 = ssov32(e_d_1 - mul_res1, self.max_pos, self.max_neg)
        result_w0 = ssov32(e_d_0 + mul_res0, self.max_pos, self.max_neg)
        self.put(result_w0, "d{0}".format(self.data['c']))
        self.put(result_w1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow(result_w0)
        ov_w1 = overflow(result_w1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_w0)
        aov_w1 = advanced_overflow(result_w1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADS_H_E3_39_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format, Saturated instruction:
        op = 0xE3
        op2 = 0x39
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADS.H_E3_39'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(9)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation bounds widened to int_64 to match the widened intermediate results.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Cross-halfword products: mul_res1 = a[hi]*b[lo], mul_res0 = a[lo]*b[hi].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts, lower word adds; widen to 64 bits, then saturate.
        result_w1 = (e_d_1 - mul_res1).cast_to(Type.int_64)
        result_w0 = (e_d_0 + mul_res0).cast_to(Type.int_64)
        result_w1_ssov = ssov32(result_w1, self.max_pos, self.max_neg)
        result_w0_ssov = ssov32(result_w0, self.max_pos, self.max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow_64(result_w0).cast_to(Type.int_32)
        ov_w1 = overflow_64(result_w1).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow_64(result_w0).cast_to(Type.int_32)
        aov_w1 = advanced_overflow_64(result_w1).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADS_H_E3_38_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format, Saturated instruction:
        op = 0xE3
        op2 = 0x38
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADS.H_E3_38'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(8)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation bounds widened to int_64 to match the widened intermediate results.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Same-halfword products: mul_res1 = a[hi]*b[hi], mul_res0 = a[lo]*b[lo].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts, lower word adds; widen to 64 bits, then saturate.
        result_w1 = (e_d_1 - mul_res1).cast_to(Type.int_64)
        result_w0 = (e_d_0 + mul_res0).cast_to(Type.int_64)
        result_w1_ssov = ssov32(result_w1, self.max_pos, self.max_neg)
        result_w0_ssov = ssov32(result_w0, self.max_pos, self.max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow_64(result_w0).cast_to(Type.int_32)
        ov_w1 = overflow_64(result_w1).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow_64(result_w0).cast_to(Type.int_32)
        aov_w1 = advanced_overflow_64(result_w1).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADS_H_E3_3B_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format, Saturated instruction:
        op = 0xE3
        op2 = 0x3B
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADS.H_E3_3B'
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xb)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode RRR1-format fields from bit strings to ints.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation bounds widened to int_64 to match the widened intermediate results.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks clamp the Q-format special case 0x8000 * 0x8000 with n==1 to 0x7fffffff.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # mul_res1 = a[lo]*b[hi], mul_res0 = a[hi]*b[hi].
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Upper word subtracts, lower word adds; widen to 64 bits, then saturate.
        result_w1 = (e_d_1 - mul_res1).cast_to(Type.int_64)
        result_w0 = (e_d_0 + mul_res0).cast_to(Type.int_64)
        # BUG FIX: this is the saturating (MSUBADS) variant, but the raw int_64
        # results were written back without calling ssov32, unlike the sibling
        # variants E3_39/E3_38. Saturate both words before the register writes.
        result_w1_ssov = ssov32(result_w1, self.max_pos, self.max_neg)
        result_w0_ssov = ssov32(result_w0, self.max_pos, self.max_neg)
        self.put(result_w0_ssov, "d{0}".format(self.data['c']))
        self.put(result_w1_ssov, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        ov_w0 = overflow_64(result_w0).cast_to(Type.int_32)
        ov_w1 = overflow_64(result_w1).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow_64(result_w0).cast_to(Type.int_32)
        aov_w1 = advanced_overflow_64(result_w1).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        # SV/SAV are sticky: kept when no (advanced) overflow, forced to 1 otherwise.
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADM_H_E3_1E_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision instruction:
    op = 0xE3
    op2 = 0x1E
    User Status Flags: V, SV, AV, SAV

    E[c] = E[d] - ((word1 - word0) << 16), where word1/word0 are the
    saturation-guarded signed 16x16 halfword products shifted left by n.
    """
    name = 'RRR1_MSUBADM.H_E3_1E'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result = e_d - ((result_word1 - result_word0) << 16)
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADM_H_E3_1D_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision instruction:
    op = 0xE3
    op2 = 0x1D
    User Status Flags: V, SV, AV, SAV

    E[c] = E[d] - ((word1 - word0) << 16), where word1/word0 are the
    saturation-guarded signed 16x16 halfword products shifted left by n.
    """
    name = 'RRR1_MSUBADM.H_E3_1D'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result = e_d - ((result_word1 - result_word0) << 16)
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADM_H_E3_1C_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision instruction:
    op = 0xE3
    op2 = 0x1C
    User Status Flags: V, SV, AV, SAV

    E[c] = E[d] - ((word1 - word0) << 16), where word1/word0 are the
    saturation-guarded signed 16x16 halfword products shifted left by n.
    """
    name = 'RRR1_MSUBADM.H_E3_1C'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result = e_d - ((result_word1 - result_word0) << 16)
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADM_H_E3_1F_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision instruction:
    op = 0xE3
    op2 = 0x1F
    User Status Flags: V, SV, AV, SAV

    E[c] = E[d] - ((word1 - word0) << 16), where word1/word0 are the
    saturation-guarded signed 16x16 halfword products shifted left by n.
    """
    name = 'RRR1_MSUBADM.H_E3_1F'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        result = e_d - ((result_word1 - result_word0) << 16)
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADMS_H_E3_3E_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision, Saturated instruction:
    op = 0xE3
    op2 = 0x3E
    User Status Flags: V, SV, AV, SAV

    Like MSUBADM.H (E3/1E) but the 64-bit result is saturated to the
    signed 64-bit range (SSOV64) before being written to E[c].
    """
    name = 'RRR1_MSUBADMS.H_E3_3E'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sub_words = (result_word1 - result_word0).cast_to(Type.int_64) << 16
        result = e_d - sub_words
        # compute SSOV64
        # Signed-overflow test for the subtraction: the sign of the result
        # disagrees with e_d while e_d and sub_words differ in sign.
        ovf_val = (result ^ e_d) & (e_d ^ sub_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow clamp to max_pos/max_neg depending on e_d's sign,
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADMS_H_E3_3D_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision, Saturated instruction:
    op = 0xE3
    op2 = 0x3D
    User Status Flags: V, SV, AV, SAV

    Like MSUBADM.H (E3/1D) but the 64-bit result is saturated to the
    signed 64-bit range (SSOV64) before being written to E[c].
    """
    name = 'RRR1_MSUBADMS.H_E3_3D'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sub_words = (result_word1 - result_word0).cast_to(Type.int_64) << 16
        result = e_d - sub_words
        # compute SSOV64
        # Signed-overflow test for the subtraction: the sign of the result
        # disagrees with e_d while e_d and sub_words differ in sign.
        ovf_val = (result ^ e_d) & (e_d ^ sub_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow clamp to max_pos/max_neg depending on e_d's sign,
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADMS_H_E3_3C_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision, Saturated instruction:
    op = 0xE3
    op2 = 0x3C
    User Status Flags: V, SV, AV, SAV

    Like MSUBADM.H (E3/1C) but the 64-bit result is saturated to the
    signed 64-bit range (SSOV64) before being written to E[c].
    """
    name = 'RRR1_MSUBADMS.H_E3_3C'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sub_words = (result_word1 - result_word0).cast_to(Type.int_64) << 16
        result = e_d - sub_words
        # compute SSOV64
        # Signed-overflow test for the subtraction: the sign of the result
        # disagrees with e_d while e_d and sub_words differ in sign.
        ovf_val = (result ^ e_d) & (e_d ^ sub_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow clamp to max_pos/max_neg depending on e_d's sign,
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADMS_H_E3_3F_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format-Multi-precision, Saturated instruction:
    op = 0xE3
    op2 = 0x3F
    User Status Flags: V, SV, AV, SAV

    Like MSUBADM.H (E3/1F) but the 64-bit result is saturated to the
    signed 64-bit range (SSOV64) before being written to E[c].
    """
    name = 'RRR1_MSUBADMS.H_E3_3F'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        # Destination register D[c] (low word of the E[c] pair).
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        n = args[2]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n; widened to 64 bits for the accumulation below.
        result_word1 = ((0x7fffffff & sc1) | \
                        ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                        (sc1^0xffffffff)).cast_to(Type.int_64)
        result_word0 = ((0x7fffffff & sc0) | \
                        ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                        (sc0^0xffffffff)).cast_to(Type.int_64)
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        # Assemble the 64-bit accumulator E[d] from the register pair.
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sub_words = (result_word1 - result_word0).cast_to(Type.int_64) << 16
        result = e_d - sub_words
        # compute SSOV64
        # Signed-overflow test for the subtraction: the sign of the result
        # disagrees with e_d while e_d and sub_words differ in sign.
        ovf_val = (result ^ e_d) & (e_d ^ sub_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow clamp to max_pos/max_neg depending on e_d's sign,
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the register pair E[c].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # V/AV are computed per 32-bit half and OR-ed together.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADR_H_E3_0E_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding instruction:
    op = 0xE3
    op2 = 0x0E
    User Status Flags: V, SV, AV, SAV

    Upper half: D[d] minus product, lower half: D[d] plus product; both
    rounded by +0x8000, then the two high halfwords are packed into D[c].
    """
    name = 'RRR1_MSUBADR.H_E3_0E'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        # Subtract from the upper halfword, add to the lower halfword;
        # +0x8000 rounds before the high 16 bits are taken below.
        result_hw1 = (d_d & 0xffff0000) - mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded high halfwords into one 32-bit register.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): ov_w0 and ov_w1 are both computed from the same
        # packed `result`, so they are identical; the ISA manual suggests
        # per-halfword overflow here — confirm against the TriCore spec.
        ov_w0 = overflow(result)
        ov_w1 = overflow(result)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result)
        aov_w1 = advanced_overflow(result)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADR_H_E3_0D_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding instruction:
    op = 0xE3
    op2 = 0x0D
    User Status Flags: V, SV, AV, SAV

    Upper half: D[d] minus product, lower half: D[d] plus product; both
    rounded by +0x8000, then the two high halfwords are packed into D[c].
    """
    name = 'RRR1_MSUBADR.H_E3_0D'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        # Subtract from the upper halfword, add to the lower halfword;
        # +0x8000 rounds before the high 16 bits are taken below.
        result_hw1 = (d_d & 0xffff0000) - mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded high halfwords into one 32-bit register.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): ov_w0 and ov_w1 are both computed from the same
        # packed `result`, so they are identical; the ISA manual suggests
        # per-halfword overflow here — confirm against the TriCore spec.
        ov_w0 = overflow(result)
        ov_w1 = overflow(result)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result)
        aov_w1 = advanced_overflow(result)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADR_H_E3_0C_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding instruction:
    op = 0xE3
    op2 = 0x0C
    User Status Flags: V, SV, AV, SAV

    Upper half: D[d] minus product, lower half: D[d] plus product; both
    rounded by +0x8000, then the two high halfwords are packed into D[c].
    """
    name = 'RRR1_MSUBADR.H_E3_0C'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        # Subtract from the upper halfword, add to the lower halfword;
        # +0x8000 rounds before the high 16 bits are taken below.
        result_hw1 = (d_d & 0xffff0000) - mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded high halfwords into one 32-bit register.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): ov_w0 and ov_w1 are both computed from the same
        # packed `result`, so they are identical; the ISA manual suggests
        # per-halfword overflow here — confirm against the TriCore spec.
        ov_w0 = overflow(result)
        ov_w1 = overflow(result)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result)
        aov_w1 = advanced_overflow(result)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADR_H_E3_0F_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding instruction:
    op = 0xE3
    op2 = 0x0F
    User Status Flags: V, SV, AV, SAV

    Upper half: D[d] minus product, lower half: D[d] plus product; both
    rounded by +0x8000, then the two high halfwords are packed into D[c].
    """
    name = 'RRR1_MSUBADR.H_E3_0F'
    # Primary opcode 0xE3 rendered as an 8-bit binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    op2_1 = "{0}".format(bin(0)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        # Decode the raw bit fields into integer register indices and n.
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        # Destination register D[c].
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        return self.get("psw", Type.int_32)
    def get_n(self):
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]
        n = args[3]
        # sc masks are all-ones when both 16-bit operands are 0x8000 and
        # n == 1: that Q-format product overflows, so saturate to 0x7FFFFFFF.
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Saturated constant where sc is set, else the signed 16x16 product
        # shifted left by n.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        # Subtract from the upper halfword, add to the lower halfword;
        # +0x8000 rounds before the high 16 bits are taken below.
        result_hw1 = (d_d & 0xffff0000) - mul_res1 + 0x8000
        result_hw0 = (d_d << 16) + mul_res0 + 0x8000
        # Pack the two rounded high halfwords into one 32-bit register.
        result = (result_hw1 & 0xffff0000) | (result_hw0 >> 16)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): ov_w0 and ov_w1 are both computed from the same
        # packed `result`, so they are identical; the ISA manual suggests
        # per-halfword overflow here — confirm against the TriCore spec.
        ov_w0 = overflow(result)
        ov_w1 = overflow(result)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result)
        aov_w1 = advanced_overflow(result)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the previous PSW bit unless a new overflow set it.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADRS_H_E3_2E_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding, Saturated instruction:
        op = 0xE3
        op2 = 0x2E
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADRS.H_E3_2E'
    # Primary opcode byte 0xE3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x2E split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | D[c] (dest) | D[d] (accumulator).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation ceiling: INT32_MAX_POS sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # Saturation floor: INT32_MAX_NEG sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        """Name of the destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        """Read accumulator register D[d] (32-bit)."""
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBADRS.H: rounded, saturated multiply-subtract (upper half) and
        multiply-add (lower half) on the halfwords selected by extract_16s;
        writes D[c] and updates the PSW user status flags."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]  # accumulator register D[d]
        n = args[3]    # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        # hw1: subtract from the upper accumulator half; hw0: add to the lower
        # half moved into bits 31..16. +0x8000 implements round-to-nearest.
        result_hw1 = ((d_d & 0xffff0000) - mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        # Saturate each 32-bit intermediate to the signed 32-bit range.
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the two rounded, saturated halfwords into the destination word.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): both word overflow flags derive from the same packed result
        # rather than the per-halfword intermediates -- confirm against the ISA manual.
        ov_w0 = overflow(result).cast_to(Type.int_32)
        ov_w1 = overflow(result).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result).cast_to(Type.int_32)
        aov_w1 = advanced_overflow(result).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADRS_H_E3_2D_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding, Saturated instruction:
        op = 0xE3
        op2 = 0x2D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADRS.H_E3_2D'
    # Primary opcode byte 0xE3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x2D split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | D[c] (dest) | D[d] (accumulator).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation ceiling: INT32_MAX_POS sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # Saturation floor: INT32_MAX_NEG sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        """Name of the destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        """Read accumulator register D[d] (32-bit)."""
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBADRS.H (0x2D halfword selection): rounded, saturated
        multiply-subtract (upper half) and multiply-add (lower half);
        writes D[c] and updates the PSW user status flags."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]  # accumulator register D[d]
        n = args[3]    # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        # hw1: subtract from the upper accumulator half; hw0: add to the lower
        # half moved into bits 31..16. +0x8000 implements round-to-nearest.
        result_hw1 = ((d_d & 0xffff0000) - mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        # Saturate each 32-bit intermediate to the signed 32-bit range.
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the two rounded, saturated halfwords into the destination word.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): both word overflow flags derive from the same packed result
        # rather than the per-halfword intermediates -- confirm against the ISA manual.
        ov_w0 = overflow(result).cast_to(Type.int_32)
        ov_w1 = overflow(result).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result).cast_to(Type.int_32)
        aov_w1 = advanced_overflow(result).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADRS_H_E3_2C_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding, Saturated instruction:
        op = 0xE3
        op2 = 0x2C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADRS.H_E3_2C'
    # Primary opcode byte 0xE3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x2C split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | D[c] (dest) | D[d] (accumulator).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation ceiling: INT32_MAX_POS sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # Saturation floor: INT32_MAX_NEG sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        """Name of the destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        """Read accumulator register D[d] (32-bit)."""
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBADRS.H (0x2C halfword selection): rounded, saturated
        multiply-subtract (upper half) and multiply-add (lower half);
        writes D[c] and updates the PSW user status flags."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]  # accumulator register D[d]
        n = args[3]    # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) & (sc0^0xffffffff)
        # hw1: subtract from the upper accumulator half; hw0: add to the lower
        # half moved into bits 31..16. +0x8000 implements round-to-nearest.
        result_hw1 = ((d_d & 0xffff0000) - mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        # Saturate each 32-bit intermediate to the signed 32-bit range.
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the two rounded, saturated halfwords into the destination word.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): both word overflow flags derive from the same packed result
        # rather than the per-halfword intermediates -- confirm against the ISA manual.
        ov_w0 = overflow(result).cast_to(Type.int_32)
        ov_w1 = overflow(result).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result).cast_to(Type.int_32)
        aov_w1 = advanced_overflow(result).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBADRS_H_E3_2F_Inst(Instruction):
    """ Packed Multiply-Subtract/Add Q Format with Rounding, Saturated instruction:
        op = 0xE3
        op2 = 0x2F
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBADRS.H_E3_2F'
    # Primary opcode byte 0xE3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xe)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x2F split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(2)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | D[c] (dest) | D[d] (accumulator).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # Saturation ceiling: INT32_MAX_POS sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_POS, Type.int_32).cast_to(Type.int_64, signed=True)
    @property
    def max_neg(self):
        # Saturation floor: INT32_MAX_NEG sign-extended to 64 bits for ssov32.
        return self.constant(INT32_MAX_NEG, Type.int_32).cast_to(Type.int_64, signed=True)
    def get_dst_reg(self):
        """Name of the destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_d(self):
        """Read accumulator register D[d] (32-bit)."""
        return self.get("d{0}".format(self.data['d']), Type.int_32)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_d_d(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBADRS.H (0x2F halfword selection): rounded, saturated
        multiply-subtract (upper half) and multiply-add (lower half);
        writes D[c] and updates the PSW user status flags."""
        d_a = args[0]
        d_b = args[1]
        d_d = args[2]  # accumulator register D[d]
        n = args[3]    # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set.
        mul_res1 = (0x7fffffff & sc1) | ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) & (sc1^0xffffffff)
        mul_res0 = (0x7fffffff & sc0) | ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) & (sc0^0xffffffff)
        # hw1: subtract from the upper accumulator half; hw0: add to the lower
        # half moved into bits 31..16. +0x8000 implements round-to-nearest.
        result_hw1 = ((d_d & 0xffff0000) - mul_res1 + 0x8000).cast_to(Type.int_64, signed=True)
        result_hw0 = ((d_d << 16) + mul_res0 + 0x8000).cast_to(Type.int_64, signed=True)
        # Saturate each 32-bit intermediate to the signed 32-bit range.
        result_hw1_ssov = ssov32(result_hw1, self.max_pos, self.max_neg)
        result_hw0_ssov = ssov32(result_hw0, self.max_pos, self.max_neg)
        # Pack the two rounded, saturated halfwords into the destination word.
        result = (result_hw1_ssov & 0xffff0000) | ((result_hw0_ssov >> 16) & 0xffff)
        self.put(result, "d{0}".format(self.data['c']))
        # set flags
        c = 0
        # NOTE(review): both word overflow flags derive from the same packed result
        # rather than the per-halfword intermediates -- confirm against the ISA manual.
        ov_w0 = overflow(result).cast_to(Type.int_32)
        ov_w1 = overflow(result).cast_to(Type.int_32)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result).cast_to(Type.int_32)
        aov_w1 = advanced_overflow(result).cast_to(Type.int_32)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBM_H_A3_1E_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision instruction:
        op = 0xA3
        op2 = 0x1E
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBM.H_A3_1E'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1E split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBM.H (0x1E halfword selection): subtract the sum of both
        halfword products (shifted into Q31 position) from the 64-bit
        accumulator pair E[d]; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        # Multi-precision subtract: the summed products are shifted left 16 more
        # bits to line up with the 64-bit Q format.
        result = e_d - ((mul_res1 + mul_res0) << 16)
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBM_H_A3_1D_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision instruction:
        op = 0xA3
        op2 = 0x1D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBM.H_A3_1D'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1D split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBM.H (0x1D halfword selection): subtract the sum of both
        halfword products (shifted into Q31 position) from the 64-bit
        accumulator pair E[d]; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        # Multi-precision subtract: the summed products are shifted left 16 more
        # bits to line up with the 64-bit Q format.
        result = e_d - ((mul_res1 + mul_res0) << 16)
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBM_H_A3_1C_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision instruction:
        op = 0xA3
        op2 = 0x1C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBM.H_A3_1C'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1C split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBM.H (0x1C halfword selection): subtract the sum of both
        halfword products (shifted into Q31 position) from the 64-bit
        accumulator pair E[d]; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        # Multi-precision subtract: the summed products are shifted left 16 more
        # bits to line up with the 64-bit Q format.
        result = e_d - ((mul_res1 + mul_res0) << 16)
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBM_H_A3_1F_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision instruction:
        op = 0xA3
        op2 = 0x1F
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBM.H_A3_1F'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x1F split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(1)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBM.H (0x1F halfword selection): subtract the sum of both
        halfword products (shifted into Q31 position) from the 64-bit
        accumulator pair E[d]; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        # Multi-precision subtract: the summed products are shifted left 16 more
        # bits to line up with the 64-bit Q format.
        result = e_d - ((mul_res1 + mul_res0) << 16)
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBMS_H_A3_3E_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision, Saturated instruction:
        op = 0xA3
        op2 = 0x3E
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBMS.H_A3_3E'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3E split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xe)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling for the multi-precision result.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor for the multi-precision result.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBMS.H (0x3E halfword selection): subtract the sum of both
        halfword products from the 64-bit accumulator pair E[d] with 64-bit
        signed saturation; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sum_words = (mul_res1 + mul_res0) << 16
        result = e_d - sum_words
        # compute SSOV64
        # Subtraction overflow test: overflow occurred iff the result's sign
        # differs from e_d's while e_d and sum_words have differing signs.
        ovf_val = (result ^ e_d) & (e_d ^ sum_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow saturate toward max_pos (e_d >= 0) or max_neg (e_d < 0);
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c'])) # E[c][31:0]
        self.put(result_1, "d{0}".format(self.data['c']+1)) # E[c][63:32]
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBMS_H_A3_3D_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision, Saturated instruction:
        op = 0xA3
        op2 = 0x3D
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBMS.H_A3_3D'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3D split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xd)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling for the multi-precision result.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor for the multi-precision result.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBMS.H (0x3D halfword selection): subtract the sum of both
        halfword products from the 64-bit accumulator pair E[d] with 64-bit
        signed saturation; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,0)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sum_words = (mul_res1 + mul_res0) << 16
        result = e_d - sum_words
        # compute SSOV64
        # Subtraction overflow test: overflow occurred iff the result's sign
        # differs from e_d's while e_d and sum_words have differing signs.
        ovf_val = (result ^ e_d) & (e_d ^ sum_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow saturate toward max_pos (e_d >= 0) or max_neg (e_d < 0);
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBMS_H_A3_3C_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision, Saturated instruction:
        op = 0xA3
        op2 = 0x3C
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBMS.H_A3_3C'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3C split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xc)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling for the multi-precision result.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor for the multi-precision result.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBMS.H (0x3C halfword selection): subtract the sum of both
        halfword products from the 64-bit accumulator pair E[d] with 64-bit
        signed saturation; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b & 0xffff) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,0)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sum_words = (mul_res1 + mul_res0) << 16
        result = e_d - sum_words
        # compute SSOV64
        # Subtraction overflow test: overflow occurred iff the result's sign
        # differs from e_d's while e_d and sum_words have differing signs.
        ovf_val = (result ^ e_d) & (e_d ^ sum_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow saturate toward max_pos (e_d >= 0) or max_neg (e_d < 0);
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
class RRR1_MSUBMS_H_A3_3F_Inst(Instruction):
    """ Packed Multiply-Subtract Q Format-Multi-precision, Saturated instruction:
        op = 0xA3
        op2 = 0x3F
        User Status Flags: V, SV, AV, SAV
    """
    name = 'RRR1_MSUBMS.H_A3_3F'
    # Primary opcode byte 0xA3 as an 8-character binary string.
    op = "{0}{1}".format(bin(0xa)[2:].zfill(4), bin(3)[2:].zfill(4))
    # Secondary opcode 0x3F split into a 2-bit and a 4-bit field.
    op2_1 = "{0}".format(bin(3)[2:].zfill(2))
    op2_2 = "{0}".format(bin(0xf)[2:].zfill(4))
    # RRR1 layout: op | D[b] | D[a] | op2 | n (shift) | c (dest pair) | d (accumulator pair).
    bin_format = op + 'b'*4 + 'a'*4 + op2_1 + op2_2 + 'n'*2 + 'c'*4 + 'd'*4
    def parse(self, bitstrm):
        """Decode the register fields a/b/c/d and shift amount n to integers."""
        data = Instruction.parse(self, bitstrm)
        data = {"a": int(data['a'], 2),
                "b": int(data['b'], 2),
                "c": int(data['c'], 2),
                "d": int(data['d'], 2),
                "n": int(data['n'], 2)}
        log_this(self.name, data, hex(self.addr))
        return data
    @property
    def max_pos(self):
        # 64-bit saturation ceiling for the multi-precision result.
        return self.constant(INT64_MAX_POS, Type.int_64)
    @property
    def max_neg(self):
        # 64-bit saturation floor for the multi-precision result.
        return self.constant(INT64_MAX_NEG, Type.int_64)
    def get_dst_reg(self):
        """Name of the low destination data register D[c]."""
        return "d{0}".format(self.data['c'])
    def get_psw(self):
        """Read the Program Status Word (32-bit)."""
        return self.get("psw", Type.int_32)
    def get_n(self):
        """Shift amount n as a 2-bit constant."""
        return self.constant(self.data['n'], Type.int_2)
    def get_d_b(self):
        """Read source register D[b] (32-bit)."""
        return self.get("d{0}".format(self.data['b']), Type.int_32)
    def get_d_a(self):
        """Read source register D[a] (32-bit)."""
        return self.get("d{0}".format(self.data['a']), Type.int_32)
    def fetch_operands(self):
        """Operands in the order compute_result consumes them."""
        return self.get_d_a(), self.get_d_b(), self.get_n()
    def compute_result(self, *args):
        """Lift MSUBMS.H (0x3F halfword selection): subtract the sum of both
        halfword products from the 64-bit accumulator pair E[d] with 64-bit
        signed saturation; writes the pair E[c] and updates PSW flags."""
        d_a = args[0]
        d_b = args[1]
        n = args[2]  # Q-format shift amount
        # sc*: all-ones mask when both selected halfwords are 0x8000 and n == 1
        # (the one product that overflows Q31 after the left shift).
        sc1 = extend_to_32_bits(((d_a & 0xffff) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        sc0 = extend_to_32_bits(((d_a >> 16) == 0x8000) & ((d_b >> 16) == 0x8000) & (n == 1).cast_to(Type.int_32))
        # Halfword products shifted by n; forced to 0x7fffffff where sc* is set,
        # then widened to 64 bits for the multi-precision accumulate.
        mul_res1 = ((0x7fffffff & sc1) | \
                    ((extract_16s(d_a,0) * extract_16s(d_b,1)) << n.value) &
                    (sc1^0xffffffff)).cast_to(Type.int_64)
        mul_res0 = ((0x7fffffff & sc0) | \
                    ((extract_16s(d_a,1) * extract_16s(d_b,1)) << n.value) &
                    (sc0^0xffffffff)).cast_to(Type.int_64)
        # Assemble the 64-bit accumulator from the register pair D[d], D[d+1].
        e_d_0 = self.get("d{0}".format(self.data['d']), Type.int_32) # E[d][31:0]
        e_d_1 = self.get("d{0}".format(self.data['d']+1), Type.int_32) # E[d][63:32]
        e_d = (e_d_1.cast_to(Type.int_64) << 32) | e_d_0.cast_to(Type.int_64)
        sum_words = (mul_res1 + mul_res0) << 16
        result = e_d - sum_words
        # compute SSOV64
        # Subtraction overflow test: overflow occurred iff the result's sign
        # differs from e_d's while e_d and sum_words have differing signs.
        ovf_val = (result ^ e_d) & (e_d ^ sum_words)
        cond_ovf_neg = extend_bits((ovf_val<0), 64)
        cond_e_d_pos = extend_bits((e_d >= 0), 64)
        # On overflow saturate toward max_pos (e_d >= 0) or max_neg (e_d < 0);
        # otherwise keep the raw result.
        result = (self.max_pos & cond_ovf_neg & cond_e_d_pos) | \
                 (self.max_neg & cond_ovf_neg & (cond_e_d_pos^0xffffffffffffffff)) | \
                 (result & (cond_ovf_neg^0xffffffffffffffff))
        # Split the 64-bit result back into the destination pair D[c], D[c+1].
        result_0 = (result & 0xffffffff).cast_to(Type.int_32)
        result_1 = (result >> 32).cast_to(Type.int_32)
        self.put(result_0, "d{0}".format(self.data['c']))
        self.put(result_1, "d{0}".format(self.data['c']+1))
        # set flags
        c = 0
        # Overflow flags are derived per 32-bit word of the 64-bit result.
        ov_w0 = overflow(result_0)
        ov_w1 = overflow(result_1)
        v = ov_w1 | ov_w0
        aov_w0 = advanced_overflow(result_0)
        aov_w1 = advanced_overflow(result_1)
        av = aov_w1 | aov_w0
        psw = self.get_psw()
        cond_sv = (v == 0)
        cond_sav = (av == 0)
        # Sticky flags: keep the old PSW bit when no overflow, else set to 1.
        sv = ((psw & SV_MASK) & cond_sv) | (1 & (cond_sv^1))
        sav = ((psw & ASV_MASK) & cond_sav) | (1 & (cond_sav^1))
        psw = set_usb(psw, c, v, sv, av, sav)
        self.put(psw, "psw")
| 35.773832
| 120
| 0.535615
| 60,965
| 379,775
| 3.12148
| 0.004445
| 0.050284
| 0.05155
| 0.05032
| 0.998707
| 0.997846
| 0.995591
| 0.995502
| 0.994656
| 0.994656
| 0
| 0.076479
| 0.275324
| 379,775
| 10,615
| 121
| 35.777202
| 0.614987
| 0.05572
| 0
| 0.959256
| 0
| 0
| 0.031248
| 0.000951
| 0
| 0
| 0.028365
| 0.000094
| 0
| 1
| 0.156909
| false
| 0
| 0.000396
| 0.116034
| 0.402558
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
49f8527747891fa9d8acc5cd1da0dbab88435214
| 61
|
py
|
Python
|
Tests/Bugs/other/r_jeff_epler_1_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 5
|
2019-05-26T20:48:36.000Z
|
2021-07-09T01:38:38.000Z
|
Tests/Bugs/other/r_jeff_epler_1_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | null | null | null |
Tests/Bugs/other/r_jeff_epler_1_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 1
|
2022-02-10T07:14:58.000Z
|
2022-02-10T07:14:58.000Z
|
# Smoke test for the r_jeff_epler_1 extension module.
import r_jeff_epler_1
# Use the print() function form: with a single argument it behaves identically
# under Python 2 (parenthesized expression) and is required syntax in Python 3,
# where the original `print x` statement is a SyntaxError.
print(r_jeff_epler_1.blowup([2, 3, 5]))
| 20.333333
| 38
| 0.786885
| 14
| 61
| 3
| 0.714286
| 0.238095
| 0.47619
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.098361
| 61
| 2
| 39
| 30.5
| 0.672727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
3fcab72888ed37426d3f75326ad80c5e2ac4bf93
| 683,381
|
py
|
Python
|
projects/src/main/python/CodeJam/Y13R5P1/JongMan/generated_py_623d5a5ed9124daf833506d5b6c57318.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 5
|
2020-04-05T18:04:13.000Z
|
2021-04-13T20:34:19.000Z
|
projects/src/main/python/CodeJam/Y13R5P1/JongMan/generated_py_623d5a5ed9124daf833506d5b6c57318.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 1
|
2020-04-29T21:42:26.000Z
|
2020-05-01T23:45:45.000Z
|
projects/src/main/python/CodeJam/Y13R5P1/JongMan/generated_py_623d5a5ed9124daf833506d5b6c57318.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 3
|
2020-01-27T16:02:14.000Z
|
2021-02-08T13:25:15.000Z
|
import sys
sys.path.append('/home/george2/Raise/ProgramRepair/CodeSeer/projects/src/main/python')
from CodeJam.Y13R5P1.JongMan.roulette2 import *
def func_b3607c87abf94237a6dc3992ce5b3f09(lowest, placed, override):
    """Return the bets in *placed* that are at most *lowest*, discarding the
    last *override* of them when *override* is positive."""
    kept = []
    for bet in placed:
        if bet <= lowest:
            kept.append(bet)
    return kept[:-override] if override > 0 else kept
def func_af4aecab5b3b43698a7d87c2fc9612eb(lowest, placed, override):
    """Filter *placed* like its sibling variants, but return ``p`` instead of
    ``partial``.

    NOTE(review): ``p`` is the list-comprehension variable; it only exists
    after the comprehension under Python 2 scoping (and only when at least
    one element was iterated).  Under Python 3 this raises NameError.  This
    looks like a machine-generated mutant of the ``return partial`` variant —
    confirm before treating ``return p`` as a bug to fix.
    """
    partial = [p for p in placed if p <= lowest]
    if override > 0:
        partial = partial[:-override]
    return p
def func_1ef2d23a5b9742beb274b24a8661c68f(lowest_cnt):
    """Return int 0 when *lowest_cnt* is zero, otherwise a fresh 0.0 accumulator."""
    return 0 if lowest_cnt == 0 else 0.0
def func_1f456037a7f44e88abe43a7664d8dc38(lowest_cnt, lowest, only_mine):
    """Expected payout share: lowest * 36.0 * only_mine / lowest_cnt, as a float.

    Note: no zero guard — a zero *lowest_cnt* raises ZeroDivisionError, as in
    the original.
    """
    payout = lowest * 36.0 * only_mine / float(lowest_cnt)
    return 0.0 + payout
def func_f92bd295d2984f4d80cbf903837be8a2(lowest, placed, override):
    """Return the number of roulette pockets with no placed bet (37 - len(placed)).

    The filtered list is still computed (and trimmed by *override*) to mirror
    the generated original, though it does not affect the return value.
    """
    subset = [bet for bet in placed if bet <= lowest]
    if override > 0:
        subset = subset[:-override]
    return 37 - len(placed)
def func_b4571d5f4f5e4f2291203ec3b53b1b57(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
return partial
def func_89207f0081b24f9c882a0fe8ff87e567(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
return p
def func_aa6ff703a869404ab5ccba8fb314dcdc(lowest_cnt, lowest, only_mine):
    """Expected payout lowest * 36.0 * only_mine / lowest_cnt, or int 0 when
    *lowest_cnt* is zero (guarding the division)."""
    if lowest_cnt != 0:
        return 0.0 + lowest * 36.0 * only_mine / float(lowest_cnt)
    return 0
def func_05b034cd4596424aa9f727b25ae9799a(lowest, placed, override):
    """Count pockets paying at level *lowest*: every unplaced pocket plus the
    placed bets at or below *lowest* (after dropping the last *override*
    of them)."""
    at_or_below = [bet for bet in placed if bet <= lowest]
    if override > 0:
        at_or_below = at_or_below[:-override]
    unplaced = 37 - len(placed)
    return unplaced + len(at_or_below)
def func_ee77ce06116d4dbf8dcdf4271624d151(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
return partial
def func_425d397183b445779a8913011fabce9d(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
return only_mine
def func_d1b63fd727154d62a0e51091702505a1(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
return p
def func_56650768666e4576b35c08ae0a8f3869(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
return p
def func_5ad99e5dabca410e8c85829604e3b7c3(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
return only_mine
def func_cd60b6989fa34950b4f881aa607c9ec1(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
return partial
def func_cc50f64d990a4d2c9d17839f0df99e5c(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
return lowest_cnt
def func_cace8cb98f8a477ca8abb3e96100f1ed(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
return only_mine
def func_9d1a53f5eae243e3a8a282b0e61e67b1(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
return ret
def func_a67fb82fe9964d89bbcfd06cc1ffd4fc(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
return p
def func_8d71ceedbe7e4d099e7ac25c72a969cf(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
return lowest_cnt
def func_debb2352e83b4aa3ba817d1a93fed623(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
return partial
def func_f3c397f88f4647b5bdd387cf7f939f7f(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
return lowest_cnt
def func_4e416694a021440aae7bd0243ccc6343(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
return only_mine
def func_9bea198fe2ef47838825c59464b01660(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
return partial
def func_8ca5932e47da4ec395984bc0243ac390(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
return ret
def func_0a833f94d5dc457ab03800aecff043da(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
return p
def func_b5023ac8c4654db2b4c81f5440079cf4(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return only_mine
def func_93fb4b4a66cf4947801fa25a5f3905ad(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return partial
def func_2054f2fcfa7e41dbb4a8573d899868ef(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return ret
def func_16fc15362ae44de382a7cf8b6300aaf0(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return p
def func_313ca3bbd50b4776b8e1cdd5a4237c60(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return lowest_cnt
def func_8ccb6be215f946c096f353ecc7e1f8ad(lowest, placed, override):
partial = [p for p in placed if p <= lowest]
if override > 0:
partial = partial[:-override]
only_mine = 37 - len(placed)
lowest_cnt = only_mine + len(partial)
if lowest_cnt == 0:
return 0
ret = 0.0
ret += lowest * 36.0 * only_mine / float(lowest_cnt)
for p in partial:
ret += 36.0 * (lowest - p) * 1.0 / lowest_cnt
return ret
def func_e1a520bf58c74b5f95fd5e1fb9835983(lowest_cnt, lowest,
remaining_budget, can_replicate, larger):
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget / lowest_cnt
)
return next_larger
def func_717b5659538b41bd88c3ece95f35a098(lowest, placed, exclude,
needed_budget):
cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
ret = max(ret, cand)
return ret
def func_53abe815e79f40a1b75b6da20024e6e1(lowest, placed, exclude,
needed_budget):
cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
ret = max(ret, cand)
return cand
def func_0a79f5bb24484ca39aec13faa8111d46(lowest, placed):
    """Budget needed to raise every pocket to *lowest*: full bets on all
    unplaced pockets plus the top-up on each placed bet below *lowest*."""
    top_ups = sum(max(0, lowest - bet) for bet in placed)
    return (37 - len(placed)) * lowest + top_ups
def func_ab9640aa475841d6891a8dee2b2ca9c4(lowest, placed):
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
return p
def func_586c631fab234f4abee0ae2b83a97524(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
return p
def func_4c057f6f577c4d45b063e72434a1a8b9(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
return remaining_budget
def func_f786d824fa0743ab91f810f4b5b2ecc6(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
return partial
def func_239cff9d8f2a4c149f4bee34d2a26c66(lowest, placed):
    """Return how many placed bets are at or below *lowest*."""
    partial = sum(1 for bet in placed if bet <= lowest)
    # Computed but unused, mirroring the generated original's locals.
    lowest_cnt = 37 - len(placed) + partial
    return partial
def func_1faf349b06954d568194f1b2fdc2a331(lowest, placed):
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return p
def func_9e543731501f45b7a9f7111e08dc6fee(lowest, placed):
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return lowest_cnt
def func_a10e776df323436c80bf480b9fc7a7ca(lowest_cnt, lowest,
remaining_budget, can_replicate, placed):
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
return larger
def func_6d48a285cace454baa0cbf40e34a8ace(lowest_cnt, lowest,
remaining_budget, can_replicate, placed):
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
return next_larger
def func_faa8e882935c4474ab27832a082ddeb4(lowest_cnt, lowest,
remaining_budget, can_replicate, placed):
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
return p
def func_0ccd2127f2214a5caa91cd8829a4d56d(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return p
def func_823ff6d2a5a745c2b4d5c99dca6bf1f8(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return lowest_cnt
def func_d61fa8489acc46ac8c5a3d2490865ffb(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return remaining_budget
def func_365835b6f7484ef28c09afbc58638e15(lowest, budget, placed, needed_budget
):
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
return partial
def func_6875a669551a4eba80d27edaf9ab29da(infile):
    """Parse a test case header from *infile* and return the budget.

    Line 1 holds "budget bets"; line 2 holds the placed bets (parsed and
    sorted, though only the budget is returned).
    """
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    return budget
def func_083d5dfe2aed4a6f8e8cc78f4922fe2a(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
return bets
def func_e4af148c84f94b669df248bb3bfc5a36(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
return placed
def func_37511302e85e4608a016bf88c5d04943(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return placed
def func_a629d90826ed496883b5466c5eb53a3f(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return ret
def func_83b242b2d1fa4e4fa7bca9942c560ab2(placed):
    """Candidate 'lowest' levels to examine: 1, each placed bet, and each
    bet's immediate neighbours (bet-1 and bet+1), in that order."""
    ret = 0.0  # kept to mirror the generated original's locals
    queue = [1]
    queue += placed
    queue += [bet - 1 for bet in placed]
    queue += [bet + 1 for bet in placed]
    return queue
def func_4b8e3f3803e8494aa0b4c9d0dcdae0a8(placed):
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return ret
def func_7d4ef7b2dd064ef093c471ba71ceeb2f(placed):
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return p
def func_b163268f8ff64d3f84e3aaa6d8010b7b(placed):
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
return queue
def func_6ef8e71e217a4110bac33d08a52272ad(placed):
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
return p
def func_88c0d8eea3ce4744a926669c6082994e():
    """Generated slice of a larger routine.

    NOTE(review): ``queue`` is read before any assignment, so every call
    raises UnboundLocalError.  In the source routine this function was cut
    from, ``queue`` was built from ``placed`` first (see the neighbouring
    variants); repairing it here would require guessing which context to
    restore, so the code is left as generated.
    """
    queue = sorted(set(queue))
    seen = set(queue)
    return queue
def func_49238305546a42b88fab1e662bc3886f():
queue = sorted(set(queue))
seen = set(queue)
return seen
def func_ce981d0535964624bf3ee9531c331713(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return ret
def func_c1fd60988f424224aad069548db86cb3(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return bets
def func_a5ecdfa35a234ecab08849a0d96e3eb5(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return placed
def func_6a2ab7d45b58407abcc316b7add8fc78(infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
return budget
def func_26126a771ebf4efba104529689bafd56(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return placed
def func_f59c5bdd3baf4e7ba6fa44bcdc21f8ad(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return p
def func_31618675966746e8a3b27760bcb0835c(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return queue
def func_99f09453144841aba7d754444625a900(infile):
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
return ret
def func_7712e59f1d0f481fbe89bcc5f0aac879(placed):
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
return ret
def func_a8c596f467194fb1b2d14694fa657671(placed):
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
return queue
def func_46d8aef8af3a4db38f56975d1aefd163(placed):
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
return p
def func_adbd3f3140124e6190f58d7026e60ce3(placed):
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
return seen
def func_024786bb17704b50afeed4c0be4abf3d(placed):
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
return p
def func_85f0ac2c5e5c456f8d538f8e34213058(placed):
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
return queue
def func_a9f293f95d944e7d866f0d196e3e2d57(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return ret
def func_4be1d115232a411493011c98316bf5c7(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return partial
def func_60f3a5452e9747acb6450bf5dca833d1(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return larger
def func_173eab38ea1a4067bff3779d02068566(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return lowest
def func_1cb3add9f5a244d4bf75fcdf4c1e97d0(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return exclude
def func_35a92a405dda4aa9bdd5424b16d64394(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return remaining_budget
def func_f965f33635b54fdab6bef89dfcc89751(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return queue
def func_54ebca06e8c94c8fbe08cbcacdc1a7c5(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return lowest_cnt
def func_2dc61e8836d049228aa560f269cf6212(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return needed_budget
def func_848e3dd00b6f4a5587339e269a2ac980(budget, placed):
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
return next_larger
def func_9201b586d2bb48b1b67b9c2ac979bc7d(budget, placed):
    """Roulette bet-level search; returns ``p``, the last placed bet
    visited by the cost loop (NameError if ``placed`` is empty —
    pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment, mirroring sibling variants) and ``ret`` is
    initialized.  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return p
def func_f258f5e37b604b3da0ac0c85a02982f0(budget, placed):
    """Roulette bet-level search; returns ``seen``, the set of every bet
    level that was ever enqueued.

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment, mirroring sibling variants) and ``ret`` is
    initialized.  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return seen
def func_3e01a7e9d3e2479287aa5ceed777784a(budget, placed):
    """Roulette bet-level search; returns ``can_replicate`` from the
    last iteration (may be unbound if the loop always bailed early —
    pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment, mirroring sibling variants) and ``ret`` is
    initialized.  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return can_replicate
def func_7c6f95a6417c4872883e34c1b0b51dd9(budget, placed):
    """Roulette bet-level search; returns ``cand``, the last candidate
    expected value computed (unbound if ``get_expected`` was never
    reached — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment, mirroring sibling variants) and ``ret`` is
    initialized.  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return cand
def func_7543941daf1f4f038456594e7b6e7269(infile):
    """Parse one test case (budget / bet-count header line, then the
    placed-bet line) from *infile* and return the declared bet count."""
    header = infile.readline().split()
    budget, bets = int(header[0]), int(header[1])
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Seed levels: 1, each placed bet, and its two neighbours (unused here).
    queue = [1] + placed + [v - 1 for v in placed] + [v + 1 for v in placed]
    return bets
def func_e0497990b4ef4b0d83a4c7d4be2c85d6(infile):
    """Read one test case from *infile* and return ``p``.

    NOTE(review): ``p`` is only bound by the list comprehensions below.
    This file targets Python 2 (``xrange`` appears elsewhere), where the
    comprehension variable leaks, so ``p`` ends up as ``placed[-1] + 1``;
    under Python 3 (or with no placed bets) this raises NameError.
    Confirm the intended return value before reusing.
    """
    # Header line: total budget and number of bets already placed.
    budget, bets = map(int, infile.readline().split())
    # Second line: the placed bets, sorted ascending.
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed bet, and its two neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    return p
def func_dec1e042c1ce4a9ea4eab8d6605b2690(infile):
    """Parse one test case from *infile* and return its budget."""
    header = infile.readline().split()
    budget, bets = int(header[0]), int(header[1])
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels built but unused by this variant.
    queue = [1] + placed + [v - 1 for v in placed] + [v + 1 for v in placed]
    return budget
def func_b73b99b05a6f4a4b87086ec200248db7(infile):
    """Parse one test case from *infile* and return the sorted list of
    placed bets."""
    header = infile.readline().split()
    budget, bets = int(header[0]), int(header[1])
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels built but unused by this variant.
    queue = [1] + placed + [v - 1 for v in placed] + [v + 1 for v in placed]
    return placed
def func_bb98bb02b80941f8982743aaba5991eb(infile):
    """Parse one test case from *infile*; always returns the initial
    best value 0.0 (this variant stops before the search)."""
    header = infile.readline().split()
    budget, bets = int(header[0]), int(header[1])
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels built but unused by this variant.
    queue = [1] + placed + [v - 1 for v in placed] + [v + 1 for v in placed]
    return ret
def func_435d5f517c2742029461a81bb808dceb(infile):
    """Parse one test case from *infile* and return the raw (unsorted,
    possibly duplicated) candidate-level list: 1, each placed bet, then
    every bet minus one, then every bet plus one."""
    header = infile.readline().split()
    budget, bets = int(header[0]), int(header[1])
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    levels = [1]
    levels.extend(placed)
    levels.extend(v - 1 for v in placed)
    levels.extend(v + 1 for v in placed)
    return levels
def func_f99950bcfbc3464abfbef40a5ea4cdf0(infile):
    """Read the placed bets from one line of *infile* and return the
    de-duplicated, sorted list of candidate bet levels (1, each bet,
    and each bet's two neighbours)."""
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    levels = [1] + placed
    for v in placed:
        levels.append(v - 1)
        levels.append(v + 1)
    return sorted(set(levels))
def func_9235167e61604887b366097ed2b99239(infile):
    """Read the placed bets from one line of *infile* and return ``p``.

    NOTE(review): ``p`` is only bound by the list comprehensions below.
    In Python 2 (this file uses ``xrange`` elsewhere) the loop variable
    leaks, so ``p`` is ``placed[-1] + 1``; under Python 3 (or with empty
    input) this raises NameError.  Confirm intent before reuse.
    """
    # Placed bets, sorted ascending.
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed bet, and its two neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    return p
def func_ffc8073941f740ed9fb51309802b4fdb(infile):
    """Read the placed bets from one line of *infile*; always returns
    the initial best value 0.0 (this variant stops before the search)."""
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    levels = [1] + placed
    for v in placed:
        levels.append(v - 1)
        levels.append(v + 1)
    queue = sorted(set(levels))
    return ret
def func_0041b49d8afa4ef18301fdeacea88257(infile):
    """Read the placed bets from one line of *infile* and return them
    sorted ascending."""
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    levels = [1] + placed
    for v in placed:
        levels.append(v - 1)
        levels.append(v + 1)
    queue = sorted(set(levels))
    return placed
def func_265673fa1dd840d7aa17bb82706d27b9(placed):
    """Build the candidate bet-level set from *placed* and return ``p``.

    NOTE(review): ``p`` is only bound by the list comprehensions below.
    In Python 2 (this file uses ``xrange`` elsewhere) the loop variable
    leaks, so ``p`` is ``placed[-1] + 1``; under Python 3 (or with an
    empty *placed*) this raises NameError.  Confirm intent before reuse.
    """
    ret = 0.0
    # Candidate levels: 1, each placed bet, and its two neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    return p
def func_a0056a7262b441039860d046ca992b88(placed):
    """Build candidate bet levels from *placed*; always returns the
    initial best value 0.0 (this variant stops before the search)."""
    ret = 0.0
    candidates = [1] + placed
    for v in placed:
        candidates.append(v - 1)
        candidates.append(v + 1)
    ordered = sorted(set(candidates))
    visited = set(ordered)
    return ret
def func_9c11471050fc48fe9e5f7facb70e4c26(placed):
    """Return the de-duplicated, sorted candidate bet levels derived
    from *placed*: 1, each placed bet, and each bet's two neighbours."""
    ret = 0.0
    candidates = [1] + placed
    for v in placed:
        candidates.append(v - 1)
        candidates.append(v + 1)
    ordered = sorted(set(candidates))
    visited = set(ordered)
    return ordered
def func_4f865c4a702244d1be54f33203b0b368(placed):
    """Return the set of candidate bet levels derived from *placed*:
    1, each placed bet, and each bet's two neighbours."""
    ret = 0.0
    candidates = [1] + placed
    for v in placed:
        candidates.append(v - 1)
        candidates.append(v + 1)
    ordered = sorted(set(candidates))
    visited = set(ordered)
    return visited
def func_9243d7ed6d9b48cba0d746db46cd269f(budget, placed):
    """Roulette bet-level search; returns ``remaining_budget`` from the
    last iteration (may be unbound if every level was unaffordable —
    pre-existing).

    ``budget`` is the money available, ``placed`` the existing bets on a
    37-slot wheel.  Fix: ``ret`` is now initialized — it was read by
    ``max(ret, cand)`` before ever being assigned.  NOTE(review):
    ``get_expected``/``xrange`` come from elsewhere in this (Python 2)
    file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return remaining_budget
def func_4e393aa5f5584bf8bd8a2742b11c1776(budget, placed):
    """Roulette bet-level search; returns ``seen``, the set of every bet
    level that was ever enqueued.

    ``budget`` is the money available, ``placed`` the existing bets on a
    37-slot wheel.  Fix: ``ret`` is now initialized — it was read by
    ``max(ret, cand)`` before ever being assigned.  NOTE(review):
    ``get_expected``/``xrange`` come from elsewhere in this (Python 2)
    file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return seen
def func_ae9c66516d354391864931f32ed23dcd(budget, placed):
    """Roulette bet-level search; returns ``ret``, the best candidate
    expected value found (0.0 when no level is affordable).

    ``budget`` is the money available, ``placed`` the existing bets on a
    37-slot wheel.  Fix: ``ret`` is now initialized — it was read by
    ``max(ret, cand)`` (and the final ``return``) before ever being
    assigned.  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read without ever being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return ret
def func_418acfab21b341c7863b413014cddc82(budget, placed):
    """Roulette bet-level search; returns ``cand``, the last candidate
    expected value computed (unbound if ``get_expected`` was never
    reached — pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return cand
def func_016c2c5ca7e4470c969584bfc6cd72f1(budget, placed):
    """Roulette bet-level search; returns ``next_larger`` from the last
    iteration (may be unbound if no level had a larger placed bet —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return next_larger
def func_d0d3c841431f47e4b717a1b7c577396c(budget, placed):
    """Roulette bet-level search; returns the (fully drained, i.e.
    empty) work queue.

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return queue
def func_72b2d94a87b147aabe50336749e37879(budget, placed):
    """Roulette bet-level search; returns ``exclude``, the last
    exclusion count tried (unbound if the inner loop never ran —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return exclude
def func_67eee9de990443e085ab6d1167d499a4(budget, placed):
    """Roulette bet-level search; returns ``partial`` from the last
    iteration (may be unbound if every level was unaffordable —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return partial
def func_462da3106c414a9c91acd8f9f47cbdfd(budget, placed):
    """Roulette bet-level search; returns ``needed_budget``, the cost
    computed for the last processed level.

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return needed_budget
def func_8d11fb75c9f64f649744f56293634ca3(budget, placed):
    """Roulette bet-level search; returns ``can_replicate`` from the
    last iteration (may be unbound if the loop always bailed early —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return can_replicate
def func_2c657999f97648419f4bb97200dffaaf(budget, placed):
    """Roulette bet-level search; returns ``lowest``, the last level
    popped off the work queue.

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return lowest
def func_7e43943e75a34c27a22b76c99d5f1a9d(budget, placed):
    """Roulette bet-level search; returns ``lowest_cnt`` from the last
    iteration (may be unbound if every level was unaffordable —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return lowest_cnt
def func_d62de335f450463198e1ef3d73e36a60(budget, placed):
    """Roulette bet-level search; returns ``p``, the last placed bet
    visited by the cost loop (NameError if ``placed`` is empty —
    pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return p
def func_5cd38c58d4cf427db53336b6d407c668(budget, placed):
    """Roulette bet-level search; returns ``larger``, the placed bets
    above the last processed level (may be unbound if every level was
    unaffordable — pre-existing).

    Fix: ``ret`` is now initialized — it was read by ``max(ret, cand)``
    before ever being assigned.  NOTE(review): ``get_expected``/
    ``xrange`` come from elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
    return larger
def func_7b0a748f0d1748a7826768d5660e8fea(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns
    ``can_replicate`` from the last iteration (may be unbound if the
    loop always bailed early — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return can_replicate
def func_10daafd3c0f648b48d0c629b51d9a68d(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns
    ``needed_budget``, the cost computed for the last processed level.

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return needed_budget
def func_c7d838ac8c7a4a8d9d8e58f411210adb(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns ``p``,
    the last placed bet visited by the cost loop (NameError if
    ``placed`` is empty — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return p
def func_d17f06f02f8a459ab2621e9229925209(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns
    ``partial`` from the last iteration (may be unbound if every level
    was unaffordable — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return partial
def func_193c4bae9b8c4b288a64ffceaa1357da(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns
    ``remaining_budget`` from the last iteration (may be unbound if
    every level was unaffordable — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return remaining_budget
def func_f2cea0e383194413943eeeb713f132a0(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns
    ``larger``, the placed bets above the last processed level (may be
    unbound if every level was unaffordable — pre-existing).

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return larger
def func_79e6f9e552d54076b8120b78f1b74c16(budget, cc, placed):
    """Roulette bet-level search for case index ``cc``; returns the
    (fully drained, i.e. empty) work queue.

    Fixes: the work queue is now seeded from ``placed`` (it was consumed
    before assignment), ``ret`` is initialized, and the mangled
    ``max(ret, cand)('Case #…')`` float-call was split into the max
    update plus a Case-report print (presumably the original intent —
    confirm).  NOTE(review): ``get_expected``/``xrange`` come from
    elsewhere in this (Python 2) file.
    """
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [p - 1 for p in placed] + [p + 1 for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    ret = 0.0  # fix: was read by max() below without being assigned
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost of raising every wheel slot to at least `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the bottom tier but stay below the next placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = (get_expected(placed, lowest, exclude)
                    - exclude - needed_budget)
            ret = max(ret, cand)
            print('Case #%d: %.10lf' % (cc + 1, ret))
    return queue
def func_acb5e7a6c4d542c796f44042db9c89b8(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last candidate score computed (NOTE(review): unbound if the
    inner loop never ran — confirm callers expect this mutant-style return).
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return cand
def func_d4514fa8f2204062acede068a879b881(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the best expected value found (``ret``).
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return ret
def func_b771b5acc080494ba562adf4b3409267(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``next_larger`` computed (NOTE(review): unbound when no
    placed bet ever exceeded the examined level — confirm callers expect it).
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return next_larger
def func_d24eaae067a54042849fed825d87a2a5(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last level popped from the work queue.
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return lowest
def func_ea3a8ec07d5c4b75aa68097c3cfc2056(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``lowest_cnt`` computed (slots at or below the level).
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return lowest_cnt
def func_d2edd5919d3b4bc09e86f1823ee8b3f8(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the set of every bet level visited during the search.
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return seen
def func_e9244afb5e0b4516abbabd813bacfda3(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``exclude`` loop value (NOTE(review): unbound if the
    inner loop never ran — confirm callers expect this mutant-style return).
    """
    # BUG FIX: `ret` and `queue` were read before assignment (NameError);
    # seed them the same way the sibling variants in this file do.
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return exclude
def func_8a3eedaae84a4ac0a4ababa1da21a8fd(infile):
    """Parse one case (budget/bets header, then bet line) from *infile*.

    infile -- file-like object positioned at a case header line
    Returns the largest placed bet.  Raises IndexError if the bet line is
    empty (the original failed there too).
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    # BUG FIX: `return p` relied on Python 2 comprehension-variable leakage
    # (p ended up as the last element of `placed`); under Python 3 it is a
    # NameError.  Return that value explicitly.
    return placed[-1]
def func_025cba4c7ac748a599cb834cf3359f00(infile):
    """Parse one case (budget/bets header, then the placed bets) and return
    the sorted, de-duplicated list of candidate bet levels (each placed bet
    plus its immediate neighbours, seeded with level 1)."""
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    levels = set([1])
    for bet in placed:
        levels.add(bet)
        levels.add(bet - 1)
        levels.add(bet + 1)
    queue = sorted(levels)
    return queue
def func_7b7feac682d94ac7ba4d052bda8ee83d(infile):
    """Consume one case (header + bet line) from *infile* and return the
    initial best-score accumulator, 0.0."""
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    neighbours = [b - 1 for b in placed] + [b + 1 for b in placed]
    queue = sorted(set([1] + placed + neighbours))
    return ret
def func_5ac0bcb86df047dbb63f820394789752(infile):
    """Consume one case (header + bet line) from *infile* and return the
    parsed budget from the header."""
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    candidates = [1]
    for bet in placed:
        candidates.extend([bet - 1, bet, bet + 1])
    queue = sorted(set(candidates))
    return budget
def func_7545214d91974685b68063f41ddd4dc8(infile):
    """Consume one case (header + bet line) from *infile* and return the
    bet count parsed from the header."""
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    candidates = set([1])
    for bet in placed:
        candidates.update([bet - 1, bet, bet + 1])
    queue = sorted(candidates)
    return bets
def func_32342c9481784983ae6f68d22a4508a7(infile):
    """Consume one case (header + bet line) from *infile* and return the
    placed bets, sorted ascending."""
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    below = [b - 1 for b in placed]
    above = [b + 1 for b in placed]
    queue = sorted(set([1] + placed + below + above))
    return placed
def func_4333d964f5cf4636bb9ee15c636daa77(infile):
    """Read the placed bets from *infile* and return the initial best-score
    accumulator, 0.0."""
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    candidates = [1] + placed
    for bet in placed:
        candidates.append(bet - 1)
        candidates.append(bet + 1)
    queue = sorted(set(candidates))
    seen = set(queue)
    return ret
def func_2b817e14313045ebab7dbfcb94ea801c(infile):
    """Read the placed bets from *infile* and return the sorted list of
    candidate levels (each bet, its neighbours, and level 1)."""
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    levels = set([1])
    for bet in placed:
        levels.update([bet - 1, bet, bet + 1])
    queue = sorted(levels)
    seen = set(queue)
    return queue
def func_211e5dca28ea42e483294e4cd1a9b770(infile):
    """Read the placed bets from *infile*.

    infile -- file-like object positioned at the bet line
    Returns the largest placed bet.  Raises IndexError if the line is empty
    (the original failed there too).
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    # BUG FIX: `return p` relied on Python 2 comprehension-variable leakage
    # (p ended up as the last element of `placed`); under Python 3 it is a
    # NameError.  Return that value explicitly.
    return placed[-1]
def func_ecb447c251bc40a98ebfe5c4a441968f(infile):
    """Read the placed bets from *infile* and return the set of candidate
    levels already marked as seen."""
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    candidates = [1] + placed
    candidates += [b - 1 for b in placed]
    candidates += [b + 1 for b in placed]
    queue = sorted(set(candidates))
    seen = set(queue)
    return seen
def func_fd243e02442d4db8951cfbae96c60fde(infile):
    """Read the placed bets from *infile* and return them sorted ascending."""
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    levels = set([1])
    for bet in placed:
        levels.add(bet - 1)
        levels.add(bet)
        levels.add(bet + 1)
    queue = sorted(levels)
    seen = set(queue)
    return placed
def func_ef66c0f32e634e5196fd6c5806fbeb73(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``next_larger`` computed (NOTE(review): unbound when no
    placed bet ever exceeded the examined level — confirm callers expect it).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return next_larger
def func_66ab7686dc274650a64d0b0bd841ecd9(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last candidate score computed (NOTE(review): unbound if the
    inner loop never ran — confirm callers expect this mutant-style return).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return cand
def func_6ea4ecb0027249d48d09c0e44f86592f(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the work queue, which is empty once the search has drained it.
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return queue
def func_5aaaeb947cf24badb29d2bfade201bf8(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``partial`` computed (count of placed bets at or below
    the last affordable level).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return partial
def func_145ea1523499472c8e10a32d1d4fcc0b(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``larger`` list computed (placed bets above the last
    affordable level).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return larger
def func_f3643d51ae9b4dcfa05275ff84e994a0(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``exclude`` loop value (NOTE(review): unbound if the
    inner loop never ran — confirm callers expect this mutant-style return).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return exclude
def func_fa5826c503454255b6e198c489d3338d(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``remaining_budget`` computed (budget left after
    reaching the last affordable level).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return remaining_budget
def func_346669cf7fdb42f08a0d0e1ae00d3231(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``needed_budget`` computed (cost of the last level
    examined).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return needed_budget
def func_9b879c5d032e4dc8a93bd1e06ad3c777(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``lowest_cnt`` computed (slots at or below the level).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest_cnt
def func_9fce7021afb7428f97af9c8f9bfc453e(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``can_replicate`` computed (extra units spreadable over
    the lowest-level slots).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return can_replicate
def func_74ddda17a48d4ad8a71f939e6f1d2a07(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last level popped from the work queue.
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest
def func_934d186d2493471f97a636549a9d331d(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the best expected value found (``ret``).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return ret
def func_0c00f14afddb48daa8444e73770dc1d5(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the set of every bet level visited during the search.
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return seen
def func_5252f512145a427aa25a240b2477d5bf(budget, placed):
    """Search uniform-bet levels for a 37-slot table.

    budget -- integer budget available for extra bets
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the loop variable ``p`` (NOTE(review): leaks from the inner
    for-loop; unbound when `placed` is empty — confirm callers expect it).
    """
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)
    return p
def func_96c52d559dfc4958900f75d7a7bbf83d(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the last ``needed_budget`` computed.
    """
    # BUG FIX: `ret` was read before assignment (NameError).
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return needed_budget
def func_543644ca3582410e8a52a5495730db23(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the work queue, which is empty once the search has drained it.
    """
    # BUG FIX: `ret` was read before assignment (NameError).
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return queue
def func_dee43b4a4abe4ddab0704e3cec0c8ddb(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the loop variable ``p`` (NOTE(review): leaks from the inner
    for-loop; unbound when `placed` is empty — confirm callers expect it).
    """
    # BUG FIX: `ret` was read before assignment (NameError).
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return p
def func_b3f12384b40e4e678c884d41caf319d2(budget, cc, placed):
    """Search uniform-bet levels for a 37-slot table and report the case.

    budget -- integer budget available for extra bets
    cc     -- zero-based case index for the "Case #i" output line
    placed -- sorted list of integer bets already placed
    Relies on the external ``get_expected`` helper defined elsewhere.
    Returns the best expected value found (``ret``).
    """
    # BUG FIX: `ret` was read before assignment (NameError).
    ret = 0.0
    # Seed the search with level 1, every placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot (empty or lower) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # // keeps the original Python 2 integer-division semantics.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget // lowest_cnt)
        else:
            can_replicate = remaining_budget // lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # range replaces Python-2-only xrange (identical iteration).
        for exclude in range(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUG FIX: the original *called* max(ret, cand) with the format
            # string (TypeError); the report line belongs after the search.
            ret = max(ret, cand)
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return ret
def func_f29e053c2e1e4b2ea0fb8efd05c00b42(budget, cc, placed):
    """Search bet levels for case *cc*; return the last evaluated candidate value.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return cand  # NOTE(review): unbound if no level fit the budget — confirm intent
def func_72494d8167ad41afb91c9461e5d7af22(budget, cc, placed):
    """Search bet levels for case *cc*; return the last `larger` list computed.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return larger  # NOTE(review): unbound if the loop body never reached it — confirm intent
def func_0ff370eaca534bafab02aa14500b9415(budget, cc, placed):
    """Search bet levels for case *cc*; return the set of visited levels.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return seen
def func_181e3ae275e54f6cae304228d5016090(budget, cc, placed):
    """Search bet levels for case *cc*; return the last lowest-tier count.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return lowest_cnt  # NOTE(review): unbound if the loop body never reached it — confirm intent
def func_2cf9d754159a457eb28b55daeb235fd9(budget, cc, placed):
    """Search bet levels for case *cc*; return the last `exclude` index tried.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return exclude  # NOTE(review): unbound if the inner loop never ran — confirm intent
def func_d40062eb1a3948898baf065f86f0ede1(budget, cc, placed):
    """Search bet levels for case *cc*; return the last `partial` count.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return partial  # NOTE(review): unbound if the loop body never reached it — confirm intent
def func_b95d26c918f344458c6ebe66b7051767(budget, cc, placed):
    """Search bet levels for case *cc*; return the last `can_replicate` value.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return can_replicate  # NOTE(review): unbound if the loop body never reached it — confirm intent
def func_d8567a2bad1d46c6a23c6ce20a31d8c8(budget, cc, placed):
    """Search bet levels for case *cc*; return the last remaining budget.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return remaining_budget  # NOTE(review): unbound if no level fit the budget — confirm intent
def func_13aba82267cd4457a8a1e4b63b44aa3d(budget, cc, placed):
    """Search bet levels for case *cc*; return the last popped level.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return lowest
def func_fc176ac0bfb34c2691613ec2a5fdc75c(budget, cc, placed):
    """Search bet levels for case *cc*; return the last next-tier bet seen.

    Fixes: `ret` read before assignment, and `max(ret, cand)` being *called*
    with the case string (TypeError). Case line now printed once at the end.
    """
    ret = 0.0  # fix: was referenced before assignment
    # Candidate "lowest" bet levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            ret = max(ret, cand)  # fix: result is a float, not callable
    print('Case #%d: %.10lf' % (cc + 1, ret))
    return next_larger  # NOTE(review): unbound if `larger` was never non-empty — confirm intent
def func_2d49266271214aaa95d35f24b76ec595(infile):
    """Parse one case from *infile*; return the initial set of candidate bet levels."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return seen
def func_f2b717277015480aa81aead5107e1714(infile):
    """Parse one case from *infile*; return the sorted candidate-level queue."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return queue
def func_55991d4d81b847b5ab9c935dd4740b43(infile):
    """Parse one case from *infile*; return the (still-zero) best expected value."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return ret
def func_ea807d1ab8ce4b54bebc0e342495390d(infile):
    """Parse one case from *infile*; return the declared number of bets."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return bets
def func_afec952bb37f4eb28ab4c1177da659b1(infile):
    """Parse one case from *infile*; return the largest placed bet.

    Bug fix: the original ended with `return p`, relying on Python 2's
    leaked list-comprehension variable (== placed[-1] after the last
    comprehension); under Python 3 that is a NameError. The value is now
    returned explicitly, and None is returned for an empty bet list.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    return placed[-1] if placed else None
def func_542440f6c97e475186a629cf67c30bd9(infile):
    """Parse one case from *infile*; return the available budget."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return budget
def func_a58808f936184171adf4ca127385aa47(infile):
    """Parse one case from *infile*; return the sorted list of placed bets."""
    budget, bets = (int(tok) for tok in infile.readline().split())
    placed = sorted(int(tok) for tok in infile.readline().split())
    ret = 0.0
    # Candidate levels: 1 plus every placed bet and its two neighbours.
    candidates = [1]
    for bet in placed:
        candidates.extend((bet, bet - 1, bet + 1))
    queue = sorted(set(candidates))
    seen = set(queue)
    return placed
def func_69269cfcb1df46e5bce82c1642a1d1c6(budget, infile):
    """Read one case's bets from *infile* and return the best expected value.

    Python 2 source: relies on `xrange` and on `/` being integer division
    when splitting the remaining budget. Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the two new candidate levels if unseen.
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # Try withholding 0..k units from the partially-covered bets.
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return ret
def func_217e5e81e67d4bc592872b6e60df7c0f(budget, infile):
    """Read one case's bets from *infile*; return the last candidate value.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return cand  # NOTE(review): unbound if no level fit the budget — confirm intent
def func_e9246604588c46948de89237bdd32275(budget, infile):
    """Read one case's bets from *infile*; return the last needed budget.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return needed_budget  # NOTE(review): unbound if the while body never ran — confirm intent
def func_dae21bdf565e4f90aae2973ca9e7486f(budget, infile):
    """Read one case's bets from *infile*; return the last popped level.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest
def func_db17c5e8503b46ad8bbf8931e5b32e52(budget, infile):
    """Read one case's bets from *infile*; return the last `exclude` tried.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return exclude  # NOTE(review): unbound if the inner loop never ran — confirm intent
def func_196b7d60df08426fb70b727c94599617(budget, infile):
    """Read one case's bets from *infile*; return the last `can_replicate`.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return can_replicate  # NOTE(review): unbound if the while body never reached it — confirm intent
def func_302cc621a81d4cc7b7db19e2f2ae6438(budget, infile):
    """Read one case's bets from *infile*; return the last next-tier bet.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return next_larger  # NOTE(review): unbound if `larger` was never non-empty — confirm intent
def func_7de616bc0ae6460098bdcacd7dae81a1(budget, infile):
    """Read one case's bets from *infile*; return the leaked loop variable `p`.

    Python 2 source (`xrange`, integer `/`, leaked comprehension/loop variable
    `p`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return p  # NOTE(review): Python-2 leaked variable; NameError under Py3 if placed is empty — confirm intent
def func_44c7df8fe0154deaa19b7f1e6ff1352e(budget, infile):
    """Read one case's bets from *infile*; return the last lowest-tier count.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest_cnt  # NOTE(review): unbound if the while body never reached it — confirm intent
def func_bae7f480a58c4213a216a5cf8ad20077(budget, infile):
    """Read one case's bets from *infile*; return the (drained) work queue.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return queue  # always empty here: the while loop runs until it drains
def func_f72aa9ed781d45b4876e17a7560a90d8(budget, infile):
    """Read one case's bets from *infile*; return the last `larger` list.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return larger  # NOTE(review): unbound if the while body never reached it — confirm intent
def func_c09e92806bcc4f7885272b1e4feb6974(budget, infile):
    """Read one case's bets from *infile*; return the last `partial` count.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return partial  # NOTE(review): unbound if the while body never reached it — confirm intent
def func_686c2cf465754d5aba6dd4d21ad63bd0(budget, infile):
    """Read one case's bets from *infile*; return the set of visited levels.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return seen
def func_64439de98f614163993b60f484a64160(budget, infile):
    """Read one case's bets from *infile*; return the last remaining budget.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return remaining_budget  # NOTE(review): unbound if no level fit the budget — confirm intent
def func_f259fde834fb4b13a3a17a7d7e838c5c(budget, infile):
    """Read one case's bets from *infile*; return the sorted placed bets.

    Python 2 source (`xrange`, integer `/`). Calls module-level `get_expected`.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate "lowest" levels: 1, each placed bet, and its neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Budget needed to top all 37 roulette numbers up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Raise the lowest tier without reaching the next placed tier.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return placed
def func_eecffcd8912a4bd4abe277464c745fec(budget, cc, placed):
    """Return the maximum expected roulette profit for one case.

    Explores candidate "lowest" bet levels depth-first: the stack is
    seeded with 1, every placed amount, and their +/-1 neighbours; new
    affordable levels discovered while spreading the budget are pushed
    as they appear.  Each affordable level is scored via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns the best expected profit found (float; 0.0 if no level is
    affordable).

    Bug fix: the original accumulation line read
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    which calls the float returned by max() -- a guaranteed TypeError
    (apparently a mangled print of the case result).  The spurious call
    has been removed; the max() accumulation itself is unchanged.
    """
    ret = 0.0
    # Seed the stack with every interesting level: 1, each placed amount,
    # and its immediate neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers (presumably roulette pockets
        # 0-36 -- TODO confirm) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial  # bets sitting at `lowest`
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Spread leftover evenly without reaching the next larger
            # placed bet; NOTE: Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Push the two newly reachable levels for later exploration.
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # Try leaving up to `exclude` of the low placed bets out.
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return ret
def func_494af83cd97d487ebace16517c0fe661(budget, cc, placed):
    """Expected-profit search variant returning the last remaining budget.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``remaining_budget`` from the last affordable iteration.
    NOTE(review): raises NameError if no level is ever affordable --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return remaining_budget
def func_5493036f88954b8a8e422c6a5156013e(budget, cc, placed):
    """Expected-profit search variant returning the last needed budget.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``needed_budget`` from the last iteration.  NOTE(review):
    raises NameError if the stack never yields a non-zero level --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return needed_budget
def func_a6d6a0fb803f4d3fa2520dfdbbc3af04(budget, cc, placed):
    """Expected-profit search variant returning the last loop value ``p``.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``p``, the last value bound by the loops/comprehensions over
    ``placed`` (relies on Python-2 list-comprehension variable leakage).
    NOTE(review): raises NameError when ``placed`` is empty -- preserved
    from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return p
def func_9bb601b8b58e42348644e4580a92c780(budget, cc, placed):
    """Expected-profit search variant returning the visited-level set.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``seen``, the set of every bet level ever pushed on the
    work stack.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return seen
def func_af6a65ea88db4fd5bc991157cc4480ca(budget, cc, placed):
    """Expected-profit search variant returning the last ``partial`` count.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``partial``, the count of placed bets <= the last affordable
    level.  NOTE(review): raises NameError if no level is affordable --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return partial
def func_122e1b9e6e1d4003b44c7593002a9839(budget, cc, placed):
    """Expected-profit search variant returning the last candidate value.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``cand``, the last candidate profit evaluated (not the
    maximum).  NOTE(review): raises NameError if no level is affordable
    -- preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return cand
def func_436932b197694d079ecde2bb83b30aad(budget, cc, placed):
    """Expected-profit search variant returning the last spread amount.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``can_replicate`` from the last affordable iteration.
    NOTE(review): raises NameError if no level is affordable --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return can_replicate
def func_1b501c76fca14d9b8fd5308bf6d224be(budget, cc, placed):
    """Expected-profit search variant returning the last popped level.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``lowest``, the final level popped from the work stack
    (the stack is non-empty on entry, so this is always bound).

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest
def func_bfd213671d3b4e869689bb315b6307cc(budget, cc, placed):
    """Expected-profit search variant returning the last next-larger bet.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``next_larger``, the smallest placed bet above the last
    level that had one.  NOTE(review): raises NameError if no iteration
    ever takes the ``larger`` branch -- preserved from the original
    interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return next_larger
def func_22489454c8b844999cd79b33963e66f1(budget, cc, placed):
    """Expected-profit search variant returning the last ``larger`` list.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``larger``, the placed bets above the last affordable level.
    NOTE(review): raises NameError if no level is affordable --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return larger
def func_a331c758c9784ab78cf6f871d182799a(budget, cc, placed):
    """Expected-profit search variant returning the drained work stack.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``queue``, which is always the empty list here (the while
    loop only exits once the stack is drained).

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return queue
def func_d5090927965c4a7196025a54a7022fd1(budget, cc, placed):
    """Expected-profit search variant returning the last low-bet count.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``lowest_cnt`` from the last affordable iteration.
    NOTE(review): raises NameError if no level is affordable --
    preserved from the original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest_cnt
def func_019880ec4b2440dbb24cba7e1e28797f(budget, cc, placed):
    """Expected-profit search variant returning the last exclusion count.

    Same depth-first search over candidate "lowest" bet levels as its
    sibling variants; scores levels via get_expected().

    budget -- whole-number budget available for additional bets
    cc     -- case index (kept for interface compatibility; unused)
    placed -- already-placed bet amounts (list of ints)

    Returns ``exclude``, the last exclusion count tried.  NOTE(review):
    raises NameError if no level is affordable -- preserved from the
    original interface.

    Bug fix: the original line
        ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
    called the float returned by max() -- a guaranteed TypeError.  The
    spurious call (a mangled print) has been removed.
    """
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return exclude
def func_dcb82034344c4468b735b388c2e217b4(infile):
    """Read one case from *infile* and return the best expected profit.

    Parses ``budget bets`` then a line of placed bet amounts, and runs
    the depth-first search over candidate "lowest" bet levels, scoring
    each affordable level via get_expected().

    infile -- file-like object; two lines of space-separated ints read

    Returns the maximum expected profit found (float; 0.0 if nothing
    is affordable).  ``bets`` is parsed but unused.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Seed the stack with every interesting level: 1, each placed amount,
    # and its immediate neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers (presumably roulette pockets
        # 0-36 -- TODO confirm) up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial  # bets sitting at `lowest`
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Spread leftover evenly without reaching the next larger
            # placed bet; NOTE: Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Push the two newly reachable levels for later exploration.
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        # Try leaving up to `exclude` of the low placed bets out.
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return ret
def func_b26814b64f404549b5593a931a3bfb60(infile):
    """Read one case from *infile*; return the last candidate profit.

    Same search as its sibling variants; this generated variant returns
    ``cand`` (last candidate evaluated) rather than the maximum.
    NOTE(review): raises NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return cand
def func_bba94e77e5154f6790eb5c168e946eb3(infile):
    """Read one case from *infile*; return the drained work stack.

    Same search as its sibling variants; this generated variant returns
    ``queue``, which is always empty after the loop.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return queue
def func_d0cc8317738649fc9b5a6a6d29c3241e(infile):
    """Read one case from *infile*; return the parsed budget.

    Same search as its sibling variants; this generated variant discards
    the computed maximum and returns ``budget`` unchanged.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return budget
def func_ab3a93c6d790498487c1d83861a28b2b(infile):
    """Read one case from *infile*; return the last ``larger`` list.

    Same search as its sibling variants; this generated variant returns
    ``larger``, the placed bets above the last affordable level.
    NOTE(review): raises NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return larger
def func_9cf9d0b97cb44ddea84599dd104835ec(infile):
    """Read one case from *infile*; return the last needed budget.

    Same search as its sibling variants; this generated variant returns
    ``needed_budget`` from the last iteration.  NOTE(review): raises
    NameError if the stack never yields a non-zero level.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return needed_budget
def func_b025205560c5431fa82482f088fee293(infile):
    """Read one case from *infile*; return the last remaining budget.

    Same search as its sibling variants; this generated variant returns
    ``remaining_budget`` from the last affordable iteration.
    NOTE(review): raises NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return remaining_budget
def func_d0269692fb8443d7a4830cf0bc5ea88a(infile):
    """Read one case from *infile*; return the last exclusion count.

    Same search as its sibling variants; this generated variant returns
    ``exclude``, the last exclusion count tried.  NOTE(review): raises
    NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return exclude
def func_97b3780de72f4878a4cee9aad047b18c(infile):
    """Read one case from *infile*; return the last ``partial`` count.

    Same search as its sibling variants; this generated variant returns
    ``partial``, the count of placed bets <= the last affordable level.
    NOTE(review): raises NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return partial
def func_14ae0304cbf14f6193ba0839e004a08b(infile):
    """Read one case from *infile*; return the last next-larger bet.

    Same search as its sibling variants; this generated variant returns
    ``next_larger``, the smallest placed bet above the last level that
    had one.  NOTE(review): raises NameError if no iteration ever takes
    the ``larger`` branch.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return next_larger
def func_aae5ec4f39b54ed79122ca8c2b8eb0e3(infile):
    """Read one case from *infile*; return the last spread amount.

    Same search as its sibling variants; this generated variant returns
    ``can_replicate`` from the last affordable iteration.
    NOTE(review): raises NameError when no level is affordable.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return can_replicate
def func_7b3c1e48b5584fedb36eb631b1bc38cd(infile):
    """Read one case from *infile*; return the parsed placed bets.

    Same search as its sibling variants; this generated variant discards
    the computed maximum and returns the sorted ``placed`` list.

    infile -- file-like object; two lines of space-separated ints read
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, each placed amount, and its +/-1 neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()  # depth-first, largest candidate first
        if lowest == 0:
            continue
        # Cost of raising all 37 numbers to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Python-2 integer floor division.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return placed
def func_0395d0f2ab3f49169b9db8c575279755(infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidate levels ``lowest`` are popped (LIFO) from a
    queue and scored via the module-level ``get_expected`` helper (defined
    elsewhere in this file).  Python 2 semantics assumed: ``xrange`` and
    truncating integer ``/``.

    Returns ``seen`` (note: the best score accumulates in ``ret`` but is
    not what this auto-generated variant returns).
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return seen
def func_7b0a587ab78c489e8acd2c2c26a2aa62(infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidate levels ``lowest`` are popped (LIFO) from a
    queue and scored via the module-level ``get_expected`` helper (defined
    elsewhere in this file).  Python 2 semantics assumed: ``xrange`` and
    truncating integer ``/``.

    Returns ``p`` -- the last bet iterated over.  NOTE(review): ``p`` is
    unbound (NameError) when ``placed`` is empty; kept as-is for caller
    compatibility with this auto-generated variant.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return p
def func_7b9f7d77a1484530846b95e281556200(infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and ``bets``; second line: the bets
    already placed.  Candidate levels ``lowest`` are popped (LIFO) from a
    queue and scored via the module-level ``get_expected`` helper (defined
    elsewhere in this file).  Python 2 semantics assumed: ``xrange`` and
    truncating integer ``/``.

    Returns ``bets`` (note: the best score accumulates in ``ret`` but is
    not what this auto-generated variant returns).
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return bets
def func_83111d5c74864ed7af33e9075416a2f2(infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidate levels ``lowest`` are popped (LIFO) from a
    queue and scored via the module-level ``get_expected`` helper (defined
    elsewhere in this file).  Python 2 semantics assumed: ``xrange`` and
    truncating integer ``/``.

    Returns ``lowest`` -- the last level popped from the queue (note: the
    best score accumulates in ``ret`` but is not what this auto-generated
    variant returns).
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest
def func_a98378c8d2a647279a6b852a3751dbde(infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidate levels ``lowest`` are popped (LIFO) from a
    queue and scored via the module-level ``get_expected`` helper (defined
    elsewhere in this file).  Python 2 semantics assumed: ``xrange`` and
    truncating integer ``/``.

    Returns ``lowest_cnt`` from the last iteration (note: the best score
    accumulates in ``ret`` but is not what this auto-generated variant
    returns).
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
        ]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1, remaining_budget /
                lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude
                ) - exclude - needed_budget
            ret = max(ret, cand)
    return lowest_cnt
def func_8a981716b1514f26bb98bf08c33d765a(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* (0-based case index) is kept for
    interface compatibility but is unused: the original body called the float
    result of ``max(ret, cand)`` with a ``'Case #%d: ...'`` format string,
    raising TypeError on the first candidate -- that bogus call is the bug
    removed here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``p`` (last placed bet iterated), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return p
def func_7c83dd71464844619bc874126c772fdb(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``next_larger`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return next_larger
def func_ab416eb9ae064724abbd16b410e4296b(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``lowest`` (last level popped), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return lowest
def func_ad6a65eb22044352be7652f108b5cc6d(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``needed_budget`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return needed_budget
def func_654db418d7c34fa1a93ae5fab58b01ee(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``exclude`` from the last inner loop, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return exclude
def func_15fc4e8a0c284cec909c468addd83437(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``remaining_budget`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return remaining_budget
def func_29a5b238cecb4ad694a77b7534b2ce4f(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``lowest_cnt`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return lowest_cnt
def func_eda06dcfdce646bbbf629d834be4b797(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``ret`` -- the best candidate score found (0.0 if none).
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return ret
def func_20448dc8039c41d9b857401a4f19a9b2(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``placed`` (the sorted parsed bets), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return placed
def func_42aee3ed8aca42caaa61131c07e085ed(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``seen`` (the set of all levels ever enqueued), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return seen
def func_a6dc24c1f29e42478b59ec268fdd0519(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``cand`` (the last candidate score), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return cand
def func_066d3292e85d4dbe8bba252d4003607a(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``larger`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return larger
def func_d28b52f8bfc345cbb2063d893733a357(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``queue`` (empty after the search drains it), as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return queue
def func_b8fab3f744e044a690749e287712ffc8(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``partial`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return partial
def func_eeb4fd5bb1fd4ff0a6a776000bbcd5ea(budget, cc, infile):
    """Explore top-up levels affordable within *budget* for bets read from *infile*.

    Candidate levels are scored by the module-level ``get_expected`` helper
    (defined elsewhere in this file).  *cc* is kept for interface
    compatibility but is unused: the original body called the float result of
    ``max(ret, cand)`` with a format string, raising TypeError on the first
    candidate -- that bogus call is the bug removed here.  Python 2 semantics
    assumed (``xrange``, truncating ``/``).

    Returns ``can_replicate`` from the last iteration, as the original did.
    """
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return can_replicate
def func_a5ddfb768e78412f84c978280dfa3ec1(cc, infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidates are scored by the module-level
    ``get_expected`` helper (defined elsewhere in this file).  *cc* is kept
    for interface compatibility but is unused: the original body called the
    float result of ``max(ret, cand)`` with a format string, raising
    TypeError on the first candidate -- that bogus call is the bug removed
    here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``partial`` from the last iteration, as the original did.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return partial
def func_df1c8da2913d42c79ddb78d85db00554(cc, infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidates are scored by the module-level
    ``get_expected`` helper (defined elsewhere in this file).  *cc* is kept
    for interface compatibility but is unused: the original body called the
    float result of ``max(ret, cand)`` with a format string, raising
    TypeError on the first candidate -- that bogus call is the bug removed
    here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``budget`` (the parsed first value), as the original did.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return budget
def func_c346738204204fa8a2f0517d43c47348(cc, infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidates are scored by the module-level
    ``get_expected`` helper (defined elsewhere in this file).  *cc* is kept
    for interface compatibility but is unused: the original body called the
    float result of ``max(ret, cand)`` with a format string, raising
    TypeError on the first candidate -- that bogus call is the bug removed
    here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``queue`` (empty after the search drains it), as the original did.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return queue
def func_a34dd5f4cc9b462a8d0223205d510661(cc, infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidates are scored by the module-level
    ``get_expected`` helper (defined elsewhere in this file).  *cc* is kept
    for interface compatibility but is unused: the original body called the
    float result of ``max(ret, cand)`` with a format string, raising
    TypeError on the first candidate -- that bogus call is the bug removed
    here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``lowest`` (the last level popped), as the original did.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return lowest
def func_7b70629151234ebca72fa7892955b28a(cc, infile):
    """Explore affordable top-up levels for one case read from *infile*.

    First input line: ``budget`` and a bet count; second line: the bets
    already placed.  Candidates are scored by the module-level
    ``get_expected`` helper (defined elsewhere in this file).  *cc* is kept
    for interface compatibility but is unused: the original body called the
    float result of ``max(ret, cand)`` with a format string, raising
    TypeError on the first candidate -- that bogus call is the bug removed
    here.  Python 2 semantics assumed (``xrange``, truncating ``/``).

    Returns ``seen`` (all levels ever enqueued), as the original did.
    """
    budget, bets = map(int, infile.readline().split())
    placed = sorted(map(int, infile.readline().split()))
    ret = 0.0
    # Candidate levels: 1, every placed bet, and each bet's neighbours.
    queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
    queue = sorted(set(queue))
    seen = set(queue)
    while queue:
        lowest = queue.pop()
        if lowest == 0:
            continue
        # Cost to raise every slot currently below `lowest` up to `lowest`.
        needed_budget = (37 - len(placed)) * lowest
        for p in placed:
            needed_budget += max(0, lowest - p)
        if budget < needed_budget:
            continue
        remaining_budget = budget - needed_budget
        partial = len([p for p in placed if p <= lowest])
        lowest_cnt = 37 - len(placed) + partial
        if lowest_cnt == 0:
            continue
        larger = [p for p in placed if p > lowest]
        if larger:
            next_larger = min(larger)
            # Levelling must stop short of the next larger placed bet.
            can_replicate = min(next_larger - lowest - 1,
                                remaining_budget / lowest_cnt)
        else:
            can_replicate = remaining_budget / lowest_cnt
        if can_replicate > 0:
            # Enqueue the raised level (and the value just below it).
            if lowest + can_replicate not in seen:
                seen.add(lowest + can_replicate)
                queue.append(lowest + can_replicate)
            if lowest + can_replicate - 1 not in seen:
                seen.add(lowest + can_replicate - 1)
                queue.append(lowest + can_replicate - 1)
        for exclude in xrange(0, min(remaining_budget, partial) + 1):
            cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
            # BUGFIX: original did max(ret, cand)('Case #%d...' % ...),
            # calling a float -> TypeError.  Keep only the max.
            ret = max(ret, cand)
    return seen
def func_338c5e27bb2245b388e911698c394bba(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return p
def func_5ee6cc2a272146c98713b6ba1c5c84d8(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return cand
def func_cee1b1c95a5f436b84a0e3f37ccb7230(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return placed
def func_f0aab3ac554e49be97ba5a453ac6f951(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return exclude
def func_91f3d95ca9f340bb9db453d3810d9ebe(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return lowest_cnt
def func_4a4750cd2da24fdeac9c72080f5c17b0(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return remaining_budget
def func_801889946caf4cbeb665ca1f748d726f(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return bets
def func_d8f6336299e140d395480b3bed0b153e(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return larger
def func_450702b6a7ba404f88a774360e0a93cb(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return ret
def func_80c4fe2f905941d78b72d8e89416307c(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return can_replicate
def func_f9a53acdeee34078bc8a8f9e1f2fba02(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return needed_budget
def func_dd766c6acd4c487dbcc7d20ffb8a5f26(cc, infile):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed
]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1, remaining_budget /
lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)('Case #%d: %.10lf' % (cc + 1, ret))
return next_larger
def func_b8b7a77b1a6f4bf99ef8804621361945():
    """Return the test-case count from the A.in input file.

    Fix: the original opened the file and never closed it, leaking the
    handle; only the parsed integer escapes, so a `with` block is safe.
    """
    with open('codejam/test_files/Y13R5P1/A.in') as infile:
        cases = int(infile.readline())
    return cases
def func_6a1bcd85a27e4465812016c718a0c590():
    """Open the A.in input file and return the still-open handle.

    The case-count header line is consumed (and parsed, so a malformed
    header still raises ValueError) before the handle is handed back,
    leaving the file positioned at the first test case.
    """
    handle = open('codejam/test_files/Y13R5P1/A.in')
    int(handle.readline())  # skip past the case count; value unused here
    return handle
def func_7dffabe4fddc434bb9e219906d2f093f(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `bets` left over from the last case. NOTE(review): unbound if
    the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return bets
def func_200a1fefdafa4ff58edca7d517cc0d2b(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns the `seen` set left over from the last case. NOTE(review):
    unbound if the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return seen
def func_9d6034e01b0044799134df8197892147(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `lowest` left over from the last case. NOTE(review): unbound if
    the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return lowest
def func_0b465a59946b43728e3275f1d08843eb(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `exclude` left over from the last case. NOTE(review): unbound if
    the file declares zero cases or no level survives the budget checks.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return exclude
def func_7d0669129fb84f4ea8d73c092b70937a(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `budget` left over from the last case. NOTE(review): unbound if
    the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return budget
def func_ee78176ad994479d8f81063d7bc240fa(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `cand` left over from the last case. NOTE(review): unbound if
    the file declares zero cases or no level survives the budget checks.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return cand
def func_e55093cbd2724cdeb209473119f15cc6(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `p` left over from the last case. NOTE(review): unbound if the
    file declares zero cases or the last case has no placed bets.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return p
def func_ec821cd60abb4227989f79f08aa7a81a(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return cases
def func_29b7b6e45d8048db8d9374e3d188b405(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `remaining_budget` left over from the last case. NOTE(review):
    unbound if zero cases or no level survives the budget checks.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return remaining_budget
def func_df5e424df57148b68dc3f8fbb20a362d(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `queue` (always empty after the search drains it).
    NOTE(review): unbound if the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return queue
def func_3aff0462414747928ef53a68dbf704db(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns the sorted `placed` bets of the last case. NOTE(review):
    unbound if the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return placed
def func_b1f139e39f3b4b589cfb62080dc826d6(infile):
    """Solve every roulette case in `infile`, printing one result line each.

    Returns `ret`, the best expected profit of the LAST case. NOTE(review):
    unbound if the file declares zero cases — confirm inputs.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every existing bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of raising all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial  # numbers at `lowest`
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division intended.
                can_replicate = min(next_larger - lowest - 1,
                                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level (and one below it).
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return ret
def func_d802a1059c234b27956794f17dd467eb(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``needed_budget`` — the top-up cost of the last level examined.
    NOTE(review): this is a leaked loop-local from the last case; it is
    unbound (NameError) when no level was ever examined — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return needed_budget
def func_4e12d1c861614438b6dd563771c0323e(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``partial`` — the count of placed bets <= the last level that
    passed the budget check. NOTE(review): leaked loop-local; unbound
    (NameError) when no level passed the check — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return partial
def func_048af8f95e7d4a76a81bc04ffdc05ca1(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``larger`` — the placed bets above the last level examined.
    NOTE(review): leaked loop-local; unbound (NameError) when the search
    never reached the `larger` computation — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return larger
def func_9293e278d5684d57bf52ed64970f2dad(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``next_larger`` — the smallest placed bet above the last level
    for which `larger` was non-empty. NOTE(review): leaked loop-local;
    unbound (NameError) when `larger` was always empty — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return next_larger
def func_206b34c6d52d4deba65b83ffb1a9c614(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``can_replicate`` — the per-number raise computed for the last
    surviving level. NOTE(review): leaked loop-local; unbound (NameError)
    when no level survived the budget/count checks — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return can_replicate
def func_42262518132f4173821794c18ac16110(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``cc`` — the zero-based index of the last case (``cases - 1``).
    NOTE(review): leaked loop variable; unbound (NameError) when the file
    declares zero cases — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return cc
def func_62ea4ea2090743e882d6a2256e9bb9a3(infile):
    """Solve every roulette case read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``lowest_cnt`` — the lowest-tier count from the last level that
    passed the budget check. NOTE(review): leaked loop-local; unbound
    (NameError) when no level passed the check — confirm callers.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
    return lowest_cnt
def func_bd00b82284714345a992f7ebbf54dcf2(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``p`` — a loop/comprehension variable that leaks into function
    scope under Python 2 semantics. NOTE(review): unbound (NameError) when
    ``placed`` was always empty — confirm callers rely on this.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return p
def func_b5886dd541304beb96195341067cec15(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``bets`` — the bet count parsed for the last case.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return bets
def func_9b3bb15f016e458ebcff5b85cbd6c519(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``ret`` — the best expected profit found for the last case.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return ret
def func_ba427fcf857d457a84d56ce1ffa42b2f(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``lowest_cnt`` — the lowest-tier count from the last level that
    passed the budget check. NOTE(review): leaked loop-local; unbound
    (NameError) when no level passed the check — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return lowest_cnt
def func_fd17d215a16d4e259eb123839dc44e6b(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``can_replicate`` — the per-number raise computed for the last
    surviving level. NOTE(review): leaked loop-local; unbound (NameError)
    when no level survived the budget/count checks — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return can_replicate
def func_cbc5e6bbfcc04067bc2cf1d790c57dd9(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``queue`` — the (normally drained) candidate-level list from the
    last case; the trailing leftover fragment below may re-add one entry.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return queue
def func_ca7919ee26e34ff6b016017b100a2550(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``remaining_budget`` — leftover budget at the last level that
    passed the budget check. NOTE(review): leaked loop-local; unbound
    (NameError) when no level passed the check — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return remaining_budget
def func_06f2c0d5aae145cd9546fd78096f59e2(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``placed`` — the sorted placed-bet list of the last case.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return placed
def func_c90380a87a734f6896c56408b9dfca22(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``partial`` — count of placed bets <= the last level that passed
    the budget check. NOTE(review): leaked loop-local; unbound (NameError)
    when no level passed the check — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return partial
def func_ebc1d4a254e847618367bcb50443ec7f(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``needed_budget`` — the top-up cost of the last level examined.
    NOTE(review): leaked loop-local; unbound (NameError) when no level was
    ever examined — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return needed_budget
def func_5fbc0222a2f9485195fe43db8ab4fedc(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``next_larger`` — the smallest placed bet above the last level
    for which `larger` was non-empty. NOTE(review): leaked loop-local;
    unbound (NameError) when `larger` was always empty — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return next_larger
def func_d34104a35c4e4a3cbcb685a5b28ee35e(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``seen`` — the set of candidate levels enqueued in the last case.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return seen
def func_5f463628341441a0875f931d0b036ecf(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``lowest`` — the last candidate level popped from the queue.
    NOTE(review): leaked loop-local; unbound (NameError) when the while loop
    never ran — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return lowest
def func_3e8097e37c8744698990cacf4781f64b(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``cc`` — the zero-based index of the last case (``cases - 1``).
    NOTE(review): leaked loop variable; unbound (NameError) when ``cases``
    is zero — confirm callers.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return cc
def func_1a84b22f99534e0fa25d4b0b8a3119f0(cases, infile):
    """Solve *cases* roulette rounds read from *infile*, printing one
    'Case #i: <value>' line per case (expected-profit search over candidate
    uniform bet levels, delegating scoring to the external ``get_expected``).

    Returns ``budget`` — the budget parsed for the last case.
    NOTE(review): unbound (NameError) when ``cases`` is zero — confirm.
    """
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0  # best expected profit found so far for this case
        # Candidate levels: 1, every placed bet, and each bet's +/-1 neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)  # levels already enqueued (dedup)
        while queue:
            lowest = queue.pop()
            if lowest == 0:  # level 0 is not a playable bet
                continue
            # Cost of topping all 37 roulette numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            # Numbers sitting at exactly `lowest` after topping up.
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 floor division: whole extra units affordable per number,
                # capped so the raised level stays below the next placed bet.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Also explore the raised level and its predecessor.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try withholding `exclude` units from the lowest-tier numbers.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): leftover duplicated fragment — the while loop above
        # has already drained `queue`, so this append is never consumed (the
        # queue is rebuilt for the next case). May raise NameError when no
        # candidate level survived the budget checks. Confirm before removing.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
    return budget
def func_6a02bf0f31b34598b9baf654d97e153b(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return larger
def func_010f380ed04949fcb2820d0413ebb35f():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return bets
def func_87780d1e8313481fb1762c5e101c8320():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return p
def func_f6161087a94f4ed6b547f3a471f9aecd():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return placed
def func_7ea384dbe3684b44a4a6ea6e650e4b2b():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return can_replicate
def func_f503133e24a04bb2a00b0cc9977b8295():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return lowest
def func_2868949534bb4b1e9a04051fdde991e8():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return larger
def func_800e0609b09b4b6b8bc983e503465154():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return needed_budget
def func_83762b7b93da4316a3aab32ef573f470():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return exclude
def func_54db9053ea104d67a67de804bb2df77c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return next_larger
def func_a9d5a2789dd44d878d1db75963f878e1():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return ret
def func_e593bc4a0b794e2aa7edf31ef735a7c9():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return queue
def func_fa55a6d85cdd41e382bc88c71e4c4368():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return seen
def func_4f2c9168549b4312a451578401a40a77():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return budget
def func_adf1029c65c346708b610c4f59d5616e():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return infile
def func_b35fdd6b22ec46ac9d9a98d9cacbb335():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return remaining_budget
def func_44107efc209d4635abc6b9cc0b82a92c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return partial
def func_3160c53328c64bc98d09d651bebd6a54():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return cc
def func_202a0ee8d63c4c88a8d433d6bb105421():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return lowest_cnt
def func_46c737324a234d238088f250b532f95f():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return cand
def func_aa03b861b3684627a0a584efb7bb44a8():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
return cases
def func_99317ad013bc4b1b9619e97a6bc4a684(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return next_larger
def func_02779287568b4632975e29caafbae94c(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return partial
def func_0c92bcbe2c9545d48af22cdf51e74101(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return can_replicate
def func_5f8169f98a3f495c86bd00d560d1ee30(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return seen
def func_f967a0dc33f94a58ba090f28d49f9a29(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return budget
def func_932f2ff1430243578714a897281e3339(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return lowest_cnt
def func_cf7fb27f2f064918b6e261fe9db81711(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return bets
def func_23bafb692d1942b1969af26dab2a4a86(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return needed_budget
def func_c336e16124104b239ec490a74a87bcbf(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return larger
def func_9a1b24f0f3fe4a268262576b49e80daa(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return ret
def func_907e76df01d8448f9433fbc6ebed54fb(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return cc
def func_4d3568ebbc554230a4929ee332587761(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return lowest
def func_1bbc126e948b4fb4a20af4d968d53a53(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return cases
def func_f44b2784caf846b4b6260ff284ee90e5(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return remaining_budget
def func_3c0477e49680400098462ae209c7b09f(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return p
def func_dc7bcd5d80cc4af98632f43ef09dcd18(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return queue
def func_ab93cf4a44ed406ea499c1ba4c14cd16(infile):
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return placed
def func_1d1c96d70cf24a139b15eb47c24fc305(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return ret
def func_aba2540eefae4bc0839f907579e112a2(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return cc
def func_8a361a215a1b4fa9902633f4f455fb96(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return lowest_cnt
def func_e6abc6b658c54688ae959bc29a50342e(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return p
def func_7203895fdde744e4b562d3c889ea0d22(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return larger
def func_7b206146484340ab9b756f447bb421dd(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return bets
def func_703a14796566413fabd4c1819b61f0ea(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return needed_budget
def func_e7cdfe0e43694d54b1cb0195a9e48a3e(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return queue
def func_34c925b20a52438ebff8bb14681388f9(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return placed
def func_91c7814a2120447cbe81ee0bfff7450d(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return next_larger
def func_39f32d839bf54c4cbb6e4f0a749276d2(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return budget
def func_2645be30df18446080ceeac71fa2aeb9(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return can_replicate
def func_8d65f3565cfc49dd8d98ca630b2e0c4d(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return partial
def func_169c2ee346a74ad2800eed357642d5de(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return seen
def func_23ab5cdf216f4d75ade922cbde9234f9(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return remaining_budget
def func_19a09d040c5e40fdb88405ced32712f0(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return lowest
def func_7e2fe86e40eb4dbfa7c6b9a72c81ffd4():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return larger
def func_349b55074a984003ac38d6088e86c313():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return p
def func_8ae20d4777344d7ebaa323d90639e74b():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return cases
def func_563e00dc9aaf4905b9b48e71e40b610c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return infile
def func_a7c817f0031c4726bb934daa0c63371c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return lowest_cnt
def func_45a7f875fb154f0486649a2864041a0a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return bets
def func_3587172fa6fc4595af1bbf069b2037d4():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return partial
def func_a77474959e1c4d5fbfc6429bb04b609a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return remaining_budget
def func_56f630a0254e41d0ad255d33bca7a79a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return budget
def func_d28d4aed70f74d4a93560209baae9e6e():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return ret
def func_c1fcb3daee704d658e9f2708a996f655():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return next_larger
def func_9e78026d43854e7088313bdd4791cb7c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return lowest
def func_e107fcac013c4d24830988abd98ebc75():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return queue
def func_fb6e412ba2c8492b8c07007d244be96b():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return placed
def func_d6749a28bf14449a92861548d7a6c9b4():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return cc
def func_0061eb8e566045d6beed64764579cae5():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return can_replicate
def func_d3434989388847d3b51e0f7e5f8b5857():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return needed_budget
def func_3fc15b436aa24581b91d3cc76774260b():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
return seen
def func_e5cdc78c584a41b1b00cefe6c7e1a64d(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``remaining_budget`` — a leftover local from code generation,
    not a meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return remaining_budget
def func_c0f4802c353540a8a3e57907d20f5f14(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``p`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return p
def func_32f92da5b8064374bbff3ae1a95a9ccc(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``queue`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return queue
def func_9b1e6967e6ab42848f1d3ed27cd1d047(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``cc`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return cc
def func_5ced77ee2f2a43a78a7907b12c0d5ae7(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``partial`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return partial
def func_6a3b7b272d6c4056b67e8a9055559007(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``next_larger`` — a leftover local from code generation, not a
    meaningful result (may even be unbound if no case ever sets it).
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return next_larger
def func_76573fbebf97469498b38fb7d8c4f128(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``budget`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return budget
def func_35821b1e41b24d3ebfa0142a6ddf5e9f(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``seen`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return seen
def func_a282d4b798df4cd1beae3b6f4b64e9be(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``needed_budget`` — a leftover local from code generation, not
    a meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return needed_budget
def func_f6af7e28761841e5a52ffd5257546182(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``lowest`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return lowest
def func_eb3ced0cb1a447f6b293a40e86b079d6(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``lowest_cnt`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return lowest_cnt
def func_5e7d25143c234be8a84ef55d82ed6109(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``bets`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return bets
def func_17ba2d357d09415d8b7cf07dd3b9620c(infile):
    """Solve Code Jam Y13R5P1/A cases read from the open file `infile`.

    Per case: reads `budget` and `bets`, then the sorted bets already
    placed on the 37 slots, and runs a worklist search over candidate
    uniform bet levels, scoring each via get_expected() and printing
    'Case #N: <best>' for every case.

    Returns ``larger`` — a leftover local from code generation, not a
    meaningful result.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1 and every placed bet +/- 1.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost to bring every slot's bet up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Python 2 integer division floors here.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            # Try excluding 0..k of the bets at or below `lowest`.
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        # NOTE(review): the worklist is already drained here, so this
        # re-seeding looks like a generated artifact with no output effect.
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    return larger
def func_2f61e386182c48aa820b73c89e22bf1a(infile):
# Auto-generated near-duplicate of func_17ba2d357d09415d8b7cf07dd3b9620c
# (same per-case betting search and per-case printing); differs only in its
# return value (`placed`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return placed
def func_5fa3643aded849299e12f5fa73a94102(infile):
# Auto-generated near-duplicate of func_17ba2d357d09415d8b7cf07dd3b9620c
# (same per-case betting search and per-case printing); differs only in its
# return value (`cases`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return cases
def func_c2a72a5ccff94d4abb6d589e22e670d4(infile):
# Auto-generated near-duplicate of func_17ba2d357d09415d8b7cf07dd3b9620c
# (same per-case betting search and per-case printing); differs only in its
# return value (`ret`, the last case's best expected value).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return ret
def func_9231b924fbe6421eaf4f6b784f77ac80(infile):
# Auto-generated near-duplicate of func_17ba2d357d09415d8b7cf07dd3b9620c
# (same per-case betting search and per-case printing); differs only in its
# return value (`can_replicate`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return can_replicate
def func_c65aaa481cf048aa8ff1de6a2cc70141(cases, infile):
# Variant of the betting-search solver that takes the case count as a
# parameter instead of reading it from infile, and closes infile when done.
# For each of `cases` test cases it reads a budget, a bet count, and the
# already-placed roulette bets (37 slots implied by "37 - len(placed)"),
# explores candidate minimum bet levels ("lowest") via a worklist, scores them
# with the external helper get_expected(), and prints the best expected value
# per case in 'Case #i: value' format.
# Python 2 syntax: print statement, xrange, and integer (floor) division in
# "remaining_budget / lowest_cnt".
# NOTE(review): indentation appears to have been stripped from this file, so
# the true nesting of the statements below (notably the repeated seen/queue
# block after the print) cannot be confirmed from this view.
# Returns `seen` (the set of visited levels from the last case) -- presumably
# a code-generation artifact; NameError if `cases` is 0.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())  # bets: declared count, unused below
placed = sorted(map(int, infile.readline().split()))
ret = 0.0  # best expected net winnings seen so far for this case
# Candidate levels: 1, each placed bet, and each bet's immediate neighbours.
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)  # guards against re-enqueueing a level
while queue:
lowest = queue.pop()
if lowest == 0:
continue
# Cost of raising every one of the 37 slots up to at least `lowest`.
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])  # pre-placed bets at or below `lowest`
lowest_cnt = 37 - len(placed) + partial  # slots sitting exactly at `lowest`
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
# Raise `lowest` no further than the next larger placed bet, and no
# further than the leftover budget spread over `lowest_cnt` slots.
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
# Try excluding 0..k of the low pre-placed bets from the replication.
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return seen
def func_1f633d0c3f81475c89eae40d71ee6d74(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`next_larger`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return next_larger
def func_29d35e5fc2d2418cb6b2e8d18f752eb6(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`exclude`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return exclude
def func_c26ddaf7400741f3a903f8dcf4a1c423(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`queue`, empty after the while loop drains it).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return queue
def func_67efc5e3797f4aef847d54d8ae4ed3be(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`lowest`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return lowest
def func_a9f63b1e792f47aa80f45a3d84e53e59(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`larger`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return larger
def func_93c53d2b7b094eccbefd7d4ff4f001cf(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`partial`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return partial
def func_57b64a55ad8743efab80bb79e47fc2e3(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`cc`, the last case index; NameError if cases == 0).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return cc
def func_2961e60591c64001be74c212fe6fff7e(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`cand`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return cand
def func_d764fdec180840cca2a279527a3d1f58(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`lowest_cnt`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return lowest_cnt
def func_ca4a02e9d847442d8f9d89006685b656(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`budget`, from the last case read).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return budget
def func_8a317ee6bac94018aa1f5f1617ab0ba4(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`can_replicate`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return can_replicate
def func_8dfbf449510249d9b6e8b6acb02fc37e(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`remaining_budget`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return remaining_budget
def func_dd1974ea63924a3f9b556f24c93e7249(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`placed`, the last case's sorted bet list).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return placed
def func_9e6fcae1c2e046a7b894a4f7ae4e2313(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`needed_budget`), likely a code-generation artifact.
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return needed_budget
def func_5ae6017c113846148a56de3aea244f3e(cases, infile):
# Auto-generated near-duplicate of func_c65aaa481cf048aa8ff1de6a2cc70141
# (same betting search, per-case printing, and infile.close()); differs only
# in its return value (`bets`, the last case's declared bet count).
# NOTE(review): file indentation appears stripped; nesting not verifiable here.
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return bets
def func_aa067da96a1b4c6ab8c71dae7d10a80e(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return p
def func_93d52fae7e1b41c0ba0687bb5c4fb2bd(cases, infile):
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return ret
def func_939352cf1a7e479c973cf751a50898f5():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return larger
def func_22690e937c504b26b936afbe90d21851():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return lowest
def func_1b6e75c23f864853a2a969499b8a93e6():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return cases
def func_716e1e6bb6674ebc87c8fc65f3b7a6b2():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return budget
def func_0b36cfd5289c4861b0ef033ea1209cca():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return placed
def func_7dec3991a058450db98622b9a40ba36d():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return queue
def func_3310a99e7d0a4564ab7c0b5bceb8ce35():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return remaining_budget
def func_c6f2d008197d4716a1e9bc42e4015283():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return cc
def func_d825ac9f24754dc3a716fac98a9879f6():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return partial
def func_0155d85a5bc14b8594cfc84e80a6396a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return ret
def func_099fb04ff32a4c22b9e4ba016e6b1928():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return p
def func_c3e58b7602624a569a62473ac0aa884a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return needed_budget
def func_678c488550104a3e82711915acbcc360():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return bets
def func_f73174629b7d4755a61766d9dd7a7959():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return seen
def func_e2e83ab6a1ef46e99d5e314a2d0ed2da():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return lowest_cnt
def func_824a2e8727af4934b24398db4faf5cd4():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return can_replicate
def func_709f9aeb6b6d4202be6de93e06c5352a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return next_larger
def func_2d778199ad9f436897f9034f9b945a6a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
return infile
def func_014b59feb2644fc381264822002e4aa8(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's `budget`.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return budget
def func_b47fcd0a7c684c9cb123a2ffecab37ea(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `remaining_budget` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return remaining_budget
def func_d3176cc3602d4718929cfe23c5722530(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `needed_budget` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return needed_budget
def func_5ba526e3304049c78241e6331024092d(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leaked comprehension/loop variable `p` (Py2 leaks it to function scope).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return p
def func_1d569696edde4de29c3c4948ff78f225(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case index `cc` (i.e. cases - 1 when cases > 0).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return cc
def func_b8652e05434d470b8d537c99cce588ae(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's `seen` set (including values added after the `print`).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case); here they also change
    the returned `seen` -- likely auto-generated duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return seen
def func_977269682686473d90d3b98aca75b209(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's `bets` count (read but otherwise unused in the body).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return bets
def func_fdbaf5c1af27404294c00e3dbd5bd629(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `next_larger` (NameError if no iteration ever set it).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return next_larger
def func_ef94d039092b43b484ed7ed5587f1028(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last candidate value `cand` computed (not necessarily the maximum).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return cand
def func_f4bb888398634cd5a82ff88c00c2ec2f(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `lowest_cnt` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return lowest_cnt
def func_f582d5f1de4c44eb8a65ff756ae92d06(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's sorted `placed` list.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return placed
def func_b1451abdba3b4a45861aaf51c96de390(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `partial` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return partial
def func_6573a452620e48e281722d4f8df5efe3(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's `queue` (empty after the while loop except for any items
    appended by the post-print block).

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case); here they also change
    the returned `queue` -- likely auto-generated duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return queue
def func_83a32361913b4c5c8441c8d5d4eb3bf6(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leaked loop variable `exclude` from the last inner for-loop run.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return exclude
def func_127f3d26d463413381dd0090c9d9d447(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `lowest` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return lowest
def func_4ec44004d185463ab9848a9619ca2dee(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `larger` list from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return larger
def func_c6abf5cb39b7496f9f14ad18d7497dbf(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    case count `cases`.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return cases
def func_e3af80c5f3944d29b0d12167e03f596b(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    last case's best value `ret`.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return ret
def func_cb830edc80524b599c35f414e9d9982c(infile):
    """Solve the Code Jam Y13R5P1/A cases read from *infile*, printing one
    "Case #N: <best value>" line per case; closes *infile* and returns the
    leftover `can_replicate` from the last while-loop iteration.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return can_replicate
def func_64c4778be53c42918793d40dbb23da44():
    """Solve the Code Jam Y13R5P1/A cases from 'codejam/test_files/Y13R5P1/A.in',
    printing one "Case #N: <best value>" line per case; closes the file and
    returns the case count `cases`.

    NOTE(review): the seen/queue updates after the `print` duplicate the
    ones inside the while loop and use stale loop variables (NameError if
    `can_replicate` was never assigned in a case) -- likely auto-generated
    duplication; verify.
    """
    infile = open('codejam/test_files/Y13R5P1/A.in')
    cases = int(infile.readline())
    for cc in xrange(cases):
        budget, bets = map(int, infile.readline().split())
        placed = sorted(map(int, infile.readline().split()))
        ret = 0.0
        # Candidate levels: 1, every placed bet, and each bet's neighbours.
        queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
            placed]
        queue = sorted(set(queue))
        seen = set(queue)
        while queue:
            lowest = queue.pop()
            if lowest == 0:
                continue
            # Cost of bringing every one of the 37 numbers up to `lowest`.
            needed_budget = (37 - len(placed)) * lowest
            for p in placed:
                needed_budget += max(0, lowest - p)
            if budget < needed_budget:
                continue
            remaining_budget = budget - needed_budget
            partial = len([p for p in placed if p <= lowest])
            lowest_cnt = 37 - len(placed) + partial
            if lowest_cnt == 0:
                continue
            larger = [p for p in placed if p > lowest]
            if larger:
                next_larger = min(larger)
                # Py2 `/` on ints is floor division.
                can_replicate = min(next_larger - lowest - 1,
                    remaining_budget / lowest_cnt)
            else:
                can_replicate = remaining_budget / lowest_cnt
            if can_replicate > 0:
                # Enqueue the raised level and its predecessor as candidates.
                if lowest + can_replicate not in seen:
                    seen.add(lowest + can_replicate)
                    queue.append(lowest + can_replicate)
                if lowest + can_replicate - 1 not in seen:
                    seen.add(lowest + can_replicate - 1)
                    queue.append(lowest + can_replicate - 1)
            for exclude in xrange(0, min(remaining_budget, partial) + 1):
                cand = get_expected(placed, lowest, exclude
                    ) - exclude - needed_budget
                ret = max(ret, cand)
        print 'Case #%d: %.10lf' % (cc + 1, ret)
        if lowest + can_replicate not in seen:
            seen.add(lowest + can_replicate)
            queue.append(lowest + can_replicate)
        if lowest + can_replicate - 1 not in seen:
            seen.add(lowest + can_replicate - 1)
            queue.append(lowest + can_replicate - 1)
    infile.close()
    return cases
def func_bd84959af057473b96f9a75fab5485bf():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return next_larger
def func_0df505aef94e4004a95d9ea8c9396a7c():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return infile
def func_4101d7d7b5284fc398b7616684eef88a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return exclude
def func_13edc9399c674843b3288ef8a16b6f5f():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return queue
def func_e5be29df76684581a6eacaf873b4c83e():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return cand
def func_1d617c41117e451bac357e3609978e02():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return lowest
def func_cec44e1bbad94c9e9db5368ec782dfd7():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return remaining_budget
def func_af8cd4cb21684b7c9ab95d9fad76b15a():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return p
def func_fd2e9e3e224d4608bdd87ea4954a1b92():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return bets
def func_0cd51dc6e0fb40db809beb7adf5199c7():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return cc
def func_1c4867a25f1941bcb1dd9a4f74f291c3():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return placed
def func_46339166d908453395964138572ec4cc():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return seen
def func_e842db7e952e42e3b17d188db816457f():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return can_replicate
def func_f6e9c2713ce048afbfc79d1be10a8955():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return ret
def func_c08638ee53ef4cd19b851862e189a98b():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return larger
def func_6ed3c4f11f8b4c78814c263238a45e44():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return needed_budget
def func_2004547e96e34cc3b5e14d520045b4ee():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return lowest_cnt
def func_e2d5a5e6872741d2bf1d178b06ff34af():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return budget
def func_6f5d6d06c5f94046b588aa518459b9ba():
infile = open('codejam/test_files/Y13R5P1/A.in')
cases = int(infile.readline())
for cc in xrange(cases):
budget, bets = map(int, infile.readline().split())
placed = sorted(map(int, infile.readline().split()))
ret = 0.0
queue = [1] + placed + [(p - 1) for p in placed] + [(p + 1) for p in
placed]
queue = sorted(set(queue))
seen = set(queue)
while queue:
lowest = queue.pop()
if lowest == 0:
continue
needed_budget = (37 - len(placed)) * lowest
for p in placed:
needed_budget += max(0, lowest - p)
if budget < needed_budget:
continue
remaining_budget = budget - needed_budget
partial = len([p for p in placed if p <= lowest])
lowest_cnt = 37 - len(placed) + partial
if lowest_cnt == 0:
continue
larger = [p for p in placed if p > lowest]
if larger:
next_larger = min(larger)
can_replicate = min(next_larger - lowest - 1,
remaining_budget / lowest_cnt)
else:
can_replicate = remaining_budget / lowest_cnt
if can_replicate > 0:
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
for exclude in xrange(0, min(remaining_budget, partial) + 1):
cand = get_expected(placed, lowest, exclude
) - exclude - needed_budget
ret = max(ret, cand)
print 'Case #%d: %.10lf' % (cc + 1, ret)
if lowest + can_replicate not in seen:
seen.add(lowest + can_replicate)
queue.append(lowest + can_replicate)
if lowest + can_replicate - 1 not in seen:
seen.add(lowest + can_replicate - 1)
queue.append(lowest + can_replicate - 1)
infile.close()
return partial
| 40.066897
| 86
| 0.539734
| 79,624
| 683,381
| 4.5027
| 0.006732
| 0.132109
| 0.144443
| 0.060649
| 0.958181
| 0.957587
| 0.957091
| 0.956801
| 0.956801
| 0.956689
| 0
| 0.0405
| 0.367776
| 683,381
| 17,055
| 87
| 40.069247
| 0.789318
| 0
| 0
| 0.961038
| 0
| 0
| 0.010005
| 0.003636
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.000124
| null | null | 0.012265
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3fcc937f2d1d986266043a89e54fcd2de4bfa0c2
| 124
|
py
|
Python
|
telegram_payment_bot/__init__.py
|
ebellocchia/telegram_payment_bot
|
a3078c4c65b67a1f82956ca29c8c698ba7a7dc89
|
[
"MIT"
] | 3
|
2021-07-26T08:15:42.000Z
|
2022-03-28T13:25:01.000Z
|
telegram_payment_bot/__init__.py
|
ebellocchia/telegram_payment_bot
|
a3078c4c65b67a1f82956ca29c8c698ba7a7dc89
|
[
"MIT"
] | null | null | null |
telegram_payment_bot/__init__.py
|
ebellocchia/telegram_payment_bot
|
a3078c4c65b67a1f82956ca29c8c698ba7a7dc89
|
[
"MIT"
] | 1
|
2022-03-30T01:33:57.000Z
|
2022-03-30T01:33:57.000Z
|
#
# Imports
#
from telegram_payment_bot._version import __version__
from telegram_payment_bot.payment_bot import PaymentBot
| 20.666667
| 55
| 0.862903
| 16
| 124
| 6.0625
| 0.5
| 0.309278
| 0.391753
| 0.453608
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 124
| 5
| 56
| 24.8
| 0.866071
| 0.056452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.